[ 657.682944] env[68443]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=68443) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 657.683407] env[68443]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=68443) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 657.683407] env[68443]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=68443) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 657.683752] env[68443]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 657.772508] env[68443]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=68443) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:390}}
[ 657.782435] env[68443]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=68443) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:428}}
[ 657.920331] env[68443]: INFO nova.virt.driver [None req-bcc455ec-88c4-483e-941f-e79f8ee9c9d4 None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 657.993053] env[68443]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 657.993199] env[68443]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.001s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 657.993308] env[68443]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=68443) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 661.129248] env[68443]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-f7c0d30c-89c1-4bde-a79e-0fb2bf4ba71a {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 661.144878] env[68443]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=68443) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 661.145030] env[68443]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-444576f5-3f26-485e-a118-32d8642c2336 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 661.179407] env[68443]: INFO oslo_vmware.api [-] Successfully established new session; session ID is ef23c.
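
The oslo_concurrency.processutils entries above ("Running cmd (subprocess)" / "CMD ... returned: 0 in 0.010s") are the standard trace emitted by a processutils.execute() call. A minimal sketch of that call pattern follows; the wrapper function name and the check_exit_code handling are illustrative assumptions, not Nova's exact invocation:

# Sketch only: running a command through oslo.concurrency produces the
# "Running cmd (subprocess)" and "CMD ... returned: N in X.XXXs" DEBUG lines above.
from oslo_concurrency import processutils

def iscsiadm_supports_manual_scan():
    # grep exits 0 when the pattern is found and 1 when it is not; accepting
    # both exit codes keeps a missing pattern from raising ProcessExecutionError.
    out, _err = processutils.execute(
        'grep', '-F', 'node.session.scan', '/sbin/iscsiadm',
        check_exit_code=[0, 1])
    return bool(out)
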
[ 661.179573] env[68443]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.186s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 661.180117] env[68443]: INFO nova.virt.vmwareapi.driver [None req-bcc455ec-88c4-483e-941f-e79f8ee9c9d4 None None] VMware vCenter version: 7.0.3
[ 661.183558] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb61b858-0311-4bc2-8cda-e57c30c444f1 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 661.201094] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad6d36c5-c02d-48c2-ba6b-e95bd6d9f02b {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 661.207293] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-592d2658-4d49-4e6d-8ea6-2f78abb3e935 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 661.213856] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa4d8309-631a-421e-a561-a82dd36514b9 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 661.226812] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-095c05b1-9b0d-4663-9ace-94bb33badf40 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 661.232593] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4d0cb56-e5df-4ce4-ab85-6b23a1c829d8 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 661.262748] env[68443]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-78e7f026-fb3e-478f-adb9-3aaf678cdc87 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 661.267735] env[68443]: DEBUG nova.virt.vmwareapi.driver [None req-bcc455ec-88c4-483e-941f-e79f8ee9c9d4 None None] Extension org.openstack.compute already exists. {{(pid=68443) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:224}}
[ 661.270385] env[68443]: INFO nova.compute.provider_config [None req-bcc455ec-88c4-483e-941f-e79f8ee9c9d4 None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
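
The "Acquiring lock" / "acquired" / '"released" ... held 3.186s' DEBUG messages above come from oslo.concurrency's lockutils whenever code runs under a named lock. A minimal sketch of that pattern; the lock name and function names here are placeholders, not the ones Nova uses:

# Sketch only: a named lock via oslo.concurrency, which emits the
# Acquiring/acquired/released DEBUG messages seen in this log.
from oslo_concurrency import lockutils

@lockutils.synchronized('example_api_lock')
def create_session():
    # Runs while holding "example_api_lock"; lockutils logs how long the
    # caller waited for the lock and how long it was held.
    pass

# The same idea as a context manager:
def create_session_ctx():
    with lockutils.lock('example_api_lock'):
        pass
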
[ 661.288261] env[68443]: DEBUG nova.context [None req-bcc455ec-88c4-483e-941f-e79f8ee9c9d4 None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),bea5f394-f1c8-45fe-9877-cee68ce56f2c(cell1) {{(pid=68443) load_cells /opt/stack/nova/nova/context.py:464}}
[ 661.290091] env[68443]: DEBUG oslo_concurrency.lockutils [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 661.290314] env[68443]: DEBUG oslo_concurrency.lockutils [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 661.290949] env[68443]: DEBUG oslo_concurrency.lockutils [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 661.291376] env[68443]: DEBUG oslo_concurrency.lockutils [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] Acquiring lock "bea5f394-f1c8-45fe-9877-cee68ce56f2c" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 661.291572] env[68443]: DEBUG oslo_concurrency.lockutils [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] Lock "bea5f394-f1c8-45fe-9877-cee68ce56f2c" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 661.292561] env[68443]: DEBUG oslo_concurrency.lockutils [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] Lock "bea5f394-f1c8-45fe-9877-cee68ce56f2c" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 661.312676] env[68443]: INFO dbcounter [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] Registered counter for database nova_cell0
[ 661.320894] env[68443]: INFO dbcounter [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] Registered counter for database nova_cell1
[ 661.323974] env[68443]: DEBUG oslo_db.sqlalchemy.engines [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=68443) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 661.324349] env[68443]: DEBUG oslo_db.sqlalchemy.engines [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=68443) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
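
The two ERROR tracebacks just below ("No DB access allowed in nova-compute") show nova-compute being blocked from querying the cell databases directly: a guard in nova.db.main.api logs the offending call stack, and the minimum-service-version lookup then falls back with a WARNING while startup continues. The following is a simplified, hypothetical illustration of that kind of guard pattern, not Nova's actual implementation; the flag and function names are made up:

# Hypothetical sketch of a "no direct DB access" guard, illustrating the
# pattern behind the tracebacks below. This is NOT Nova's actual code.
import logging
import traceback

LOG = logging.getLogger(__name__)
DB_ACCESS_ALLOWED = False  # a compute-only service would leave this False

def require_db_access(func):
    def wrapper(*args, **kwargs):
        if not DB_ACCESS_ALLOWED:
            # Log where the forbidden call came from, like the ERROR entries below.
            LOG.error('No DB access allowed in this service: %s',
                      ''.join(traceback.format_stack()))
            raise RuntimeError('direct database access is disabled here')
        return func(*args, **kwargs)
    return wrapper

@require_db_access
def service_get_minimum_version(context, binaries):
    ...  # would query the database in a service that is allowed to
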
[ 661.328907] env[68443]: DEBUG dbcounter [-] [68443] Writer thread running {{(pid=68443) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:102}}
[ 661.329639] env[68443]: DEBUG dbcounter [-] [68443] Writer thread running {{(pid=68443) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:102}}
[ 661.332696] env[68443]: ERROR nova.db.main.api [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main
[ 661.332696] env[68443]: result = function(*args, **kwargs)
[ 661.332696] env[68443]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 661.332696] env[68443]: return func(*args, **kwargs)
[ 661.332696] env[68443]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 661.332696] env[68443]: result = fn(*args, **kwargs)
[ 661.332696] env[68443]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 661.332696] env[68443]: return f(*args, **kwargs)
[ 661.332696] env[68443]: File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version
[ 661.332696] env[68443]: return db.service_get_minimum_version(context, binaries)
[ 661.332696] env[68443]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 661.332696] env[68443]: _check_db_access()
[ 661.332696] env[68443]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 661.332696] env[68443]: stacktrace = ''.join(traceback.format_stack())
[ 661.332696] env[68443]:
[ 661.333506] env[68443]: ERROR nova.db.main.api [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main
[ 661.333506] env[68443]: result = function(*args, **kwargs)
[ 661.333506] env[68443]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 661.333506] env[68443]: return func(*args, **kwargs)
[ 661.333506] env[68443]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 661.333506] env[68443]: result = fn(*args, **kwargs)
[ 661.333506] env[68443]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 661.333506] env[68443]: return f(*args, **kwargs)
[ 661.333506] env[68443]: File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version
[ 661.333506] env[68443]: return db.service_get_minimum_version(context, binaries)
[ 661.333506] env[68443]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 661.333506] env[68443]: _check_db_access()
[ 661.333506] env[68443]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 661.333506] env[68443]: stacktrace = ''.join(traceback.format_stack())
[ 661.333506] env[68443]:
[ 661.334230] env[68443]: WARNING nova.objects.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 661.334230] env[68443]: WARNING nova.objects.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] Failed to get minimum service version for cell bea5f394-f1c8-45fe-9877-cee68ce56f2c
[ 661.334398] env[68443]: DEBUG oslo_concurrency.lockutils [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] Acquiring lock "singleton_lock" {{(pid=68443) lock
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 661.334559] env[68443]: DEBUG oslo_concurrency.lockutils [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] Acquired lock "singleton_lock" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 661.334817] env[68443]: DEBUG oslo_concurrency.lockutils [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] Releasing lock "singleton_lock" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 661.335148] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] Full set of CONF: {{(pid=68443) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}} [ 661.335296] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] ******************************************************************************** {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2600}} [ 661.335456] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] Configuration options gathered from: {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2601}} [ 661.335595] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 661.335782] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2603}} [ 661.335914] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] ================================================================================ {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2605}} [ 661.336146] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] allow_resize_to_same_host = True {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.336336] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] arq_binding_timeout = 300 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.336491] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] backdoor_port = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.336629] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] backdoor_socket = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.336814] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] block_device_allocate_retries = 60 {{(pid=68443) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.336998] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] block_device_allocate_retries_interval = 3 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.337164] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cert = self.pem {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.337333] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.337501] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] compute_monitors = [] {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.337669] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] config_dir = [] {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.337849] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] config_drive_format = iso9660 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.337981] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.338158] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] config_source = [] {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.338327] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] console_host = devstack {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.338493] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] control_exchange = nova {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.338656] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cpu_allocation_ratio = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.338817] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] daemon = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.338985] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] debug = True {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.339159] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] default_access_ip_network_name = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.339329] 
env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] default_availability_zone = nova {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.339488] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] default_ephemeral_format = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.339653] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] default_green_pool_size = 1000 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.339888] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.340072] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] default_schedule_zone = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.340237] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] disk_allocation_ratio = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.340400] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] enable_new_services = True {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.340580] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] enabled_apis = ['osapi_compute'] {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.340748] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] enabled_ssl_apis = [] {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.341197] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] flat_injected = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.341197] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] force_config_drive = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.341302] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] force_raw_images = True {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.341392] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 
None None] graceful_shutdown_timeout = 5 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.341551] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] heal_instance_info_cache_interval = 60 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.341767] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] host = cpu-1 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.341946] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] initial_cpu_allocation_ratio = 4.0 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.342124] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] initial_disk_allocation_ratio = 1.0 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.342289] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] initial_ram_allocation_ratio = 1.0 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.342535] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.342707] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] instance_build_timeout = 0 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.342870] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] instance_delete_interval = 300 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.343050] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] instance_format = [instance: %(uuid)s] {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.343224] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] instance_name_template = instance-%08x {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.343392] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] instance_usage_audit = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.343560] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] instance_usage_audit_period = month {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.343730] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.343901] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] 
instances_path = /opt/stack/data/nova/instances {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.344082] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] internal_service_availability_zone = internal {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.344246] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] key = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.344410] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] live_migration_retry_count = 30 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.344576] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] log_config_append = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.344743] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.344906] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] log_dir = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.345075] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] log_file = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.345207] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] log_options = True {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.345370] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] log_rotate_interval = 1 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.345569] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] log_rotate_interval_type = days {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.345748] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] log_rotation_type = none {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.345881] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.346014] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.346195] env[68443]: DEBUG 
oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.346362] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.346501] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.346686] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] long_rpc_timeout = 1800 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.346857] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] max_concurrent_builds = 10 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.347068] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] max_concurrent_live_migrations = 1 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.347190] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] max_concurrent_snapshots = 5 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.347351] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] max_local_block_devices = 3 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.347512] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] max_logfile_count = 30 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.347675] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] max_logfile_size_mb = 200 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.347835] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] maximum_instance_delete_attempts = 5 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.348020] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] metadata_listen = 0.0.0.0 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.348185] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] metadata_listen_port = 8775 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.348351] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] metadata_workers = 2 {{(pid=68443) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.348511] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] migrate_max_retries = -1 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.348680] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] mkisofs_cmd = genisoimage {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.348884] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] my_block_storage_ip = 10.180.1.21 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.349030] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] my_ip = 10.180.1.21 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.349201] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] network_allocate_retries = 0 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.349381] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.349552] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] osapi_compute_listen = 0.0.0.0 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.349717] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] osapi_compute_listen_port = 8774 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.349885] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] osapi_compute_unique_server_name_scope = {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.350066] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] osapi_compute_workers = 2 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.350234] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] password_length = 12 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.350398] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] periodic_enable = True {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.350560] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] periodic_fuzzy_delay = 60 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.350731] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] pointer_model = usbtablet {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.350898] env[68443]: 
DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] preallocate_images = none {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.351070] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] publish_errors = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.351204] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] pybasedir = /opt/stack/nova {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.351363] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] ram_allocation_ratio = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.351524] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] rate_limit_burst = 0 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.351693] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] rate_limit_except_level = CRITICAL {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.351854] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] rate_limit_interval = 0 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.352023] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] reboot_timeout = 0 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.352183] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] reclaim_instance_interval = 0 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.352343] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] record = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.352512] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] reimage_timeout_per_gb = 60 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.352681] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] report_interval = 120 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.352841] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] rescue_timeout = 0 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.353014] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] reserved_host_cpus = 0 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.353180] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] reserved_host_disk_mb = 0 {{(pid=68443) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.353342] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] reserved_host_memory_mb = 512 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.353506] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] reserved_huge_pages = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.353667] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] resize_confirm_window = 0 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.353828] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] resize_fs_using_block_device = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.353985] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] resume_guests_state_on_host_boot = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.354168] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.354331] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] rpc_response_timeout = 60 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.354491] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] run_external_periodic_tasks = True {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.354662] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] running_deleted_instance_action = reap {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.354823] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] running_deleted_instance_poll_interval = 1800 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.354981] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] running_deleted_instance_timeout = 0 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.355153] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] scheduler_instance_sync_interval = 120 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.355322] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] service_down_time = 720 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.355522] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] servicegroup_driver = db {{(pid=68443) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.355674] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] shelved_offload_time = 0 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.355839] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] shelved_poll_interval = 3600 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.356017] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] shutdown_timeout = 0 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.356187] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] source_is_ipv6 = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.356347] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] ssl_only = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.356608] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.356790] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] sync_power_state_interval = 600 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.356958] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] sync_power_state_pool_size = 1000 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.357162] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] syslog_log_facility = LOG_USER {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.357301] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] tempdir = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.357462] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] timeout_nbd = 10 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.357631] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] transport_url = **** {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.357795] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] update_resources_interval = 0 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.357953] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] use_cow_images = True {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.358126] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 
None None] use_eventlog = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.358289] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] use_journal = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.358446] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] use_json = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.358605] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] use_rootwrap_daemon = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.358767] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] use_stderr = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.358920] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] use_syslog = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.359086] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vcpu_pin_set = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.359255] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vif_plugging_is_fatal = True {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.359423] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vif_plugging_timeout = 300 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.359594] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] virt_mkfs = [] {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.359756] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] volume_usage_poll_interval = 0 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.359915] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] watch_log_file = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.360094] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] web = /usr/share/spice-html5 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 661.360280] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_concurrency.disable_process_locking = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.360590] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.360785] 
env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.360945] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.361132] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_messaging_metrics.metrics_process_name = {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.361305] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.361471] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.361654] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] api.auth_strategy = keystone {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.361822] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] api.compute_link_prefix = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.361999] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.362189] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] api.dhcp_domain = novalocal {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.362359] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] api.enable_instance_password = True {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.362526] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] api.glance_link_prefix = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.362694] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.362942] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] api.instance_list_cells_batch_strategy = distributed {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.363075] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] 
api.instance_list_per_project_cells = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.363242] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] api.list_records_by_skipping_down_cells = True {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.363405] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] api.local_metadata_per_cell = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.363573] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] api.max_limit = 1000 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.363743] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] api.metadata_cache_expiration = 15 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.363917] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] api.neutron_default_tenant_id = default {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.364095] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] api.use_neutron_default_nets = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.364268] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.364433] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] api.vendordata_dynamic_failure_fatal = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.364600] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.364777] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] api.vendordata_dynamic_ssl_certfile = {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.364947] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] api.vendordata_dynamic_targets = [] {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.365127] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] api.vendordata_jsonfile_path = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.365314] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] api.vendordata_providers = ['StaticJSON'] {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.365530] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 
None None] cache.backend = dogpile.cache.memcached {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.365711] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cache.backend_argument = **** {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.365886] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cache.config_prefix = cache.oslo {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.366071] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cache.dead_timeout = 60.0 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.366242] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cache.debug_cache_backend = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.366404] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cache.enable_retry_client = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.366574] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cache.enable_socket_keepalive = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.366765] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cache.enabled = True {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.366933] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cache.enforce_fips_mode = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.367112] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cache.expiration_time = 600 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.367279] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cache.hashclient_retry_attempts = 2 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.367446] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cache.hashclient_retry_delay = 1.0 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.367610] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cache.memcache_dead_retry = 300 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.367773] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cache.memcache_password = **** {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.367936] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=68443) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.368109] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.368276] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cache.memcache_pool_maxsize = 10 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.368438] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cache.memcache_pool_unused_timeout = 60 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.368602] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cache.memcache_sasl_enabled = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.368786] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cache.memcache_servers = ['localhost:11211'] {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.368954] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cache.memcache_socket_timeout = 1.0 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.369134] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cache.memcache_username = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.369304] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cache.proxies = [] {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.369468] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cache.redis_password = **** {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.369640] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cache.redis_sentinel_service_name = mymaster {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.369820] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.369989] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cache.redis_server = localhost:6379 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.370171] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cache.redis_socket_timeout = 1.0 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.370333] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cache.redis_username = None {{(pid=68443) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.370501] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cache.retry_attempts = 2 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.370674] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cache.retry_delay = 0.0 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.370835] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cache.socket_keepalive_count = 1 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.371008] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cache.socket_keepalive_idle = 1 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.371182] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cache.socket_keepalive_interval = 1 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.371342] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cache.tls_allowed_ciphers = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.371502] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cache.tls_cafile = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.371662] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cache.tls_certfile = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.371823] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cache.tls_enabled = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.371982] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cache.tls_keyfile = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.372169] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cinder.auth_section = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.372347] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cinder.auth_type = password {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.372511] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cinder.cafile = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.372687] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cinder.catalog_info = volumev3::publicURL {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.372850] env[68443]: DEBUG oslo_service.service 
[None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cinder.certfile = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.373021] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cinder.collect_timing = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.373187] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cinder.cross_az_attach = True {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.373351] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cinder.debug = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.373511] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cinder.endpoint_template = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.373676] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cinder.http_retries = 3 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.373840] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cinder.insecure = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.374000] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cinder.keyfile = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.374190] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cinder.os_region_name = RegionOne {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.374358] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cinder.split_loggers = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.374522] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cinder.timeout = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.374698] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.374859] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] compute.cpu_dedicated_set = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.375092] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] compute.cpu_shared_set = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.375195] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] compute.image_type_exclude_list = [] {{(pid=68443) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.375360] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] compute.live_migration_wait_for_vif_plug = True {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.375548] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] compute.max_concurrent_disk_ops = 0 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.375729] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] compute.max_disk_devices_to_attach = -1 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.375897] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.376083] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.376254] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] compute.resource_provider_association_refresh = 300 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.376420] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.376605] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] compute.shutdown_retry_interval = 10 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.376809] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.377007] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] conductor.workers = 2 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.377196] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] console.allowed_origins = [] {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.377361] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] console.ssl_ciphers = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.377566] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] console.ssl_minimum_version = default {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.377758] env[68443]: DEBUG oslo_service.service [None 
req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] consoleauth.enforce_session_timeout = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.377934] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] consoleauth.token_ttl = 600 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.378116] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cyborg.cafile = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.378283] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cyborg.certfile = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.378451] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cyborg.collect_timing = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.378615] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cyborg.connect_retries = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.378778] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cyborg.connect_retry_delay = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.378939] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cyborg.endpoint_override = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.379119] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cyborg.insecure = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.379314] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cyborg.keyfile = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.379450] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cyborg.max_version = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.379613] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cyborg.min_version = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.379773] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cyborg.region_name = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.379932] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cyborg.retriable_status_codes = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.380106] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cyborg.service_name = None {{(pid=68443) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.380282] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cyborg.service_type = accelerator {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.380447] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cyborg.split_loggers = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.380606] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cyborg.status_code_retries = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.380771] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cyborg.status_code_retry_delay = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.380928] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cyborg.timeout = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.381118] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.381283] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] cyborg.version = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.381463] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] database.backend = sqlalchemy {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.381634] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] database.connection = **** {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.381805] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] database.connection_debug = 0 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.381978] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] database.connection_parameters = {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.382161] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] database.connection_recycle_time = 3600 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.382328] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] database.connection_trace = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.382494] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] database.db_inc_retry_interval = True {{(pid=68443) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.382664] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] database.db_max_retries = 20 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.382828] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] database.db_max_retry_interval = 10 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.382993] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] database.db_retry_interval = 1 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.383172] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] database.max_overflow = 50 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.383337] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] database.max_pool_size = 5 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.383531] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] database.max_retries = 10 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.383713] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] database.mysql_sql_mode = TRADITIONAL {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.383878] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] database.mysql_wsrep_sync_wait = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.384051] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] database.pool_timeout = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.384224] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] database.retry_interval = 10 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.384385] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] database.slave_connection = **** {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.384550] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] database.sqlite_synchronous = True {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.384718] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] database.use_db_reconnect = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.384897] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] api_database.backend = sqlalchemy {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
661.385080] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] api_database.connection = **** {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.385268] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] api_database.connection_debug = 0 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.385444] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] api_database.connection_parameters = {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.385640] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] api_database.connection_recycle_time = 3600 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.385813] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] api_database.connection_trace = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.385979] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] api_database.db_inc_retry_interval = True {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.386587] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] api_database.db_max_retries = 20 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.386587] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] api_database.db_max_retry_interval = 10 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.386587] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] api_database.db_retry_interval = 1 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.386742] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] api_database.max_overflow = 50 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.386835] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] api_database.max_pool_size = 5 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.387008] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] api_database.max_retries = 10 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.387187] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.387362] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] api_database.mysql_wsrep_sync_wait = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.387523] 
env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] api_database.pool_timeout = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.387688] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] api_database.retry_interval = 10 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.387847] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] api_database.slave_connection = **** {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.388017] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] api_database.sqlite_synchronous = True {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.388194] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] devices.enabled_mdev_types = [] {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.388371] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.388542] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] ephemeral_storage_encryption.default_format = luks {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.388709] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] ephemeral_storage_encryption.enabled = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.388872] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] ephemeral_storage_encryption.key_size = 512 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.389051] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] glance.api_servers = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.389219] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] glance.cafile = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.389380] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] glance.certfile = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.389571] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] glance.collect_timing = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.389761] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] glance.connect_retries = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.389925] env[68443]: DEBUG 
oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] glance.connect_retry_delay = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.390105] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] glance.debug = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.390277] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] glance.default_trusted_certificate_ids = [] {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.390442] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] glance.enable_certificate_validation = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.390607] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] glance.enable_rbd_download = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.390770] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] glance.endpoint_override = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.390938] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] glance.insecure = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.391117] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] glance.keyfile = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.391282] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] glance.max_version = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.391445] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] glance.min_version = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.391612] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] glance.num_retries = 3 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.391786] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] glance.rbd_ceph_conf = {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.391950] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] glance.rbd_connect_timeout = 5 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.392135] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] glance.rbd_pool = {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.392307] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] glance.rbd_user = {{(pid=68443) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.392469] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] glance.region_name = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.392642] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] glance.retriable_status_codes = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.392793] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] glance.service_name = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.392962] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] glance.service_type = image {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.393140] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] glance.split_loggers = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.393301] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] glance.status_code_retries = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.393486] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] glance.status_code_retry_delay = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.393659] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] glance.timeout = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.393842] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.394024] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] glance.verify_glance_signatures = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.394192] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] glance.version = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.394364] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] guestfs.debug = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.394541] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] mks.enabled = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.394895] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.395156] 
env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] image_cache.manager_interval = 2400 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.395351] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] image_cache.precache_concurrency = 1 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.395550] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] image_cache.remove_unused_base_images = True {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.395736] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.395908] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.396098] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] image_cache.subdirectory_name = _base {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.396283] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] ironic.api_max_retries = 60 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.396452] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] ironic.api_retry_interval = 2 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.396617] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] ironic.auth_section = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.396785] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] ironic.auth_type = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.396947] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] ironic.cafile = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.397122] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] ironic.certfile = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.397292] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] ironic.collect_timing = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.397457] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] ironic.conductor_group = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.397617] env[68443]: DEBUG 
oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] ironic.connect_retries = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.397778] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] ironic.connect_retry_delay = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.397938] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] ironic.endpoint_override = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.398113] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] ironic.insecure = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.398276] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] ironic.keyfile = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.398438] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] ironic.max_version = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.398599] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] ironic.min_version = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.398766] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] ironic.peer_list = [] {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.398925] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] ironic.region_name = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.399095] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] ironic.retriable_status_codes = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.399262] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] ironic.serial_console_state_timeout = 10 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.399422] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] ironic.service_name = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.399593] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] ironic.service_type = baremetal {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.399758] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] ironic.shard = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.399921] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] ironic.split_loggers = False {{(pid=68443) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.400091] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] ironic.status_code_retries = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.400256] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] ironic.status_code_retry_delay = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.400414] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] ironic.timeout = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.400594] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.400756] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] ironic.version = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.400939] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.401124] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] key_manager.fixed_key = **** {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.401308] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.401470] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] barbican.barbican_api_version = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.401661] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] barbican.barbican_endpoint = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.401853] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] barbican.barbican_endpoint_type = public {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.402027] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] barbican.barbican_region_name = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.402197] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] barbican.cafile = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.402356] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] barbican.certfile = None {{(pid=68443) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.402521] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] barbican.collect_timing = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.402686] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] barbican.insecure = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.402845] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] barbican.keyfile = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.403022] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] barbican.number_of_retries = 60 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.403189] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] barbican.retry_delay = 1 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.403354] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] barbican.send_service_user_token = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.403516] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] barbican.split_loggers = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.403674] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] barbican.timeout = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.403834] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] barbican.verify_ssl = True {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.403990] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] barbican.verify_ssl_path = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.404171] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] barbican_service_user.auth_section = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.404340] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] barbican_service_user.auth_type = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.404500] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] barbican_service_user.cafile = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.404660] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] barbican_service_user.certfile = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
661.404868] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] barbican_service_user.collect_timing = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.405054] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] barbican_service_user.insecure = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.405223] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] barbican_service_user.keyfile = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.405385] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] barbican_service_user.split_loggers = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.405564] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] barbican_service_user.timeout = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.405839] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vault.approle_role_id = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.405920] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vault.approle_secret_id = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.406071] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vault.cafile = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.406241] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vault.certfile = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.406405] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vault.collect_timing = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.406566] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vault.insecure = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.406730] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vault.keyfile = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.406904] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vault.kv_mountpoint = secret {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.407073] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vault.kv_path = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.407245] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None 
None] vault.kv_version = 2 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.407407] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vault.namespace = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.407568] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vault.root_token_id = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.407735] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vault.split_loggers = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.407895] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vault.ssl_ca_crt_file = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.408065] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vault.timeout = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.408231] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vault.use_ssl = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.408408] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.408579] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] keystone.auth_section = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.408747] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] keystone.auth_type = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.408908] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] keystone.cafile = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.409080] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] keystone.certfile = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.409251] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] keystone.collect_timing = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.409409] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] keystone.connect_retries = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.409601] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] keystone.connect_retry_delay = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
661.409808] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] keystone.endpoint_override = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.409982] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] keystone.insecure = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.410160] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] keystone.keyfile = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.410324] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] keystone.max_version = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.410519] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] keystone.min_version = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.410689] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] keystone.region_name = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.410934] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] keystone.retriable_status_codes = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.411234] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] keystone.service_name = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.411513] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] keystone.service_type = identity {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.411713] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] keystone.split_loggers = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.411885] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] keystone.status_code_retries = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.412068] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] keystone.status_code_retry_delay = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.412236] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] keystone.timeout = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.412420] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.412586] env[68443]: DEBUG oslo_service.service [None 
req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] keystone.version = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.412792] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.connection_uri = {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.412957] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.cpu_mode = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.413139] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.cpu_model_extra_flags = [] {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.413314] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.cpu_models = [] {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.413526] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.cpu_power_governor_high = performance {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.413717] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.cpu_power_governor_low = powersave {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.413884] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.cpu_power_management = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.414069] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.414245] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.device_detach_attempts = 8 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.414411] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.device_detach_timeout = 20 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.414578] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.disk_cachemodes = [] {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.414741] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.disk_prefix = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.414906] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.enabled_perf_events = [] {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.415083] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] 
libvirt.file_backed_memory = 0 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.415251] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.gid_maps = [] {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.415413] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.hw_disk_discard = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.415594] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.hw_machine_type = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.415776] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.images_rbd_ceph_conf = {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.415945] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.416123] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.416296] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.images_rbd_glance_store_name = {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.416468] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.images_rbd_pool = rbd {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.416640] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.images_type = default {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.416801] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.images_volume_group = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.416965] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.inject_key = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.417143] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.inject_partition = -2 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.417309] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.inject_password = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.417475] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.iscsi_iface = None {{(pid=68443) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.417642] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.iser_use_multipath = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.417807] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.live_migration_bandwidth = 0 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.417974] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.live_migration_completion_timeout = 800 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.418155] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.live_migration_downtime = 500 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.418322] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.live_migration_downtime_delay = 75 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.418488] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.live_migration_downtime_steps = 10 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.418651] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.live_migration_inbound_addr = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.418813] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.live_migration_permit_auto_converge = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.418971] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.live_migration_permit_post_copy = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.419151] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.live_migration_scheme = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.419328] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.live_migration_timeout_action = abort {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.419494] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.live_migration_tunnelled = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.419658] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.live_migration_uri = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.419821] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] 
libvirt.live_migration_with_native_tls = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.419981] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.max_queues = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.420158] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.mem_stats_period_seconds = 10 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.420388] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.420553] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.nfs_mount_options = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.420846] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.421024] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.num_aoe_discover_tries = 3 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.421199] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.num_iser_scan_tries = 5 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.421362] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.num_memory_encrypted_guests = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.421530] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.num_nvme_discover_tries = 5 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.421697] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.num_pcie_ports = 0 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.421868] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.num_volume_scan_tries = 5 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.422044] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.pmem_namespaces = [] {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.422212] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.quobyte_client_cfg = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.422491] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] 
libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.422670] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.rbd_connect_timeout = 5 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.422839] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.423015] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.423187] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.rbd_secret_uuid = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.423350] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.rbd_user = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.423543] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.realtime_scheduler_priority = 1 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.423723] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.remote_filesystem_transport = ssh {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.423885] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.rescue_image_id = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.424056] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.rescue_kernel_id = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.424222] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.rescue_ramdisk_id = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.424392] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.rng_dev_path = /dev/urandom {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.424552] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.rx_queue_size = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.424724] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.smbfs_mount_options = {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.424997] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] 
libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.425184] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.snapshot_compression = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.425349] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.snapshot_image_format = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.425616] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.425801] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.sparse_logical_volumes = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.425966] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.swtpm_enabled = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.426159] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.swtpm_group = tss {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.426335] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.swtpm_user = tss {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.426507] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.sysinfo_serial = unique {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.426699] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.tb_cache_size = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.426870] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.tx_queue_size = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.427051] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.uid_maps = [] {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.427226] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.use_virtio_for_bridges = True {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.427396] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.virt_type = kvm {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.427566] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.volume_clear = zero 
{{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.427733] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.volume_clear_size = 0 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.427899] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.volume_use_multipath = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.428068] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.vzstorage_cache_path = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.428241] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.428410] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.vzstorage_mount_group = qemu {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.428578] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.vzstorage_mount_opts = [] {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.428781] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.429072] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.429257] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.vzstorage_mount_user = stack {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.429426] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.429602] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] neutron.auth_section = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.429782] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] neutron.auth_type = password {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.429945] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] neutron.cafile = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.430122] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] neutron.certfile = None 
{{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.430289] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] neutron.collect_timing = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.430448] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] neutron.connect_retries = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.430607] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] neutron.connect_retry_delay = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.430829] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] neutron.default_floating_pool = public {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.431043] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] neutron.endpoint_override = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.431245] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] neutron.extension_sync_interval = 600 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.431408] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] neutron.http_retries = 3 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.431574] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] neutron.insecure = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.431737] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] neutron.keyfile = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.431898] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] neutron.max_version = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.432082] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] neutron.metadata_proxy_shared_secret = **** {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.432251] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] neutron.min_version = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.432426] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] neutron.ovs_bridge = br-int {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.432596] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] neutron.physnets = [] {{(pid=68443) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.432770] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] neutron.region_name = RegionOne {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.432933] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] neutron.retriable_status_codes = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.433121] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] neutron.service_metadata_proxy = True {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.433287] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] neutron.service_name = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.433485] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] neutron.service_type = network {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.433663] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] neutron.split_loggers = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.433827] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] neutron.status_code_retries = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.433987] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] neutron.status_code_retry_delay = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.434167] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] neutron.timeout = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.434351] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.434517] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] neutron.version = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.434692] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] notifications.bdms_in_notifications = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.434870] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] notifications.default_level = INFO {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.435060] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] notifications.notification_format = unversioned {{(pid=68443) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.435233] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] notifications.notify_on_state_change = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.435411] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.435621] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] pci.alias = [] {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.435798] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] pci.device_spec = [] {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.435969] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] pci.report_in_placement = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.436158] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] placement.auth_section = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.436339] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] placement.auth_type = password {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.436511] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] placement.auth_url = http://10.180.1.21/identity {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.436679] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] placement.cafile = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.436840] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] placement.certfile = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.437016] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] placement.collect_timing = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.437187] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] placement.connect_retries = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.437349] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] placement.connect_retry_delay = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.437535] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] placement.default_domain_id = None {{(pid=68443) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.437724] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] placement.default_domain_name = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.437892] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] placement.domain_id = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.438069] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] placement.domain_name = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.438235] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] placement.endpoint_override = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.438400] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] placement.insecure = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.438561] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] placement.keyfile = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.438723] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] placement.max_version = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.438882] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] placement.min_version = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.439063] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] placement.password = **** {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.439230] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] placement.project_domain_id = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.439400] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] placement.project_domain_name = Default {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.439567] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] placement.project_id = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.439745] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] placement.project_name = service {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.439919] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] placement.region_name = RegionOne {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.440093] 
env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] placement.retriable_status_codes = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.440258] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] placement.service_name = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.440429] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] placement.service_type = placement {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.440597] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] placement.split_loggers = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.440766] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] placement.status_code_retries = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.440925] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] placement.status_code_retry_delay = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.441097] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] placement.system_scope = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.441263] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] placement.timeout = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.441421] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] placement.trust_id = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.441583] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] placement.user_domain_id = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.441753] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] placement.user_domain_name = Default {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.441912] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] placement.user_id = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.442098] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] placement.username = placement {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.442283] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.442445] env[68443]: DEBUG oslo_service.service [None 
req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] placement.version = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.442626] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] quota.cores = 20 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.442796] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] quota.count_usage_from_placement = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.442969] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.443162] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] quota.injected_file_content_bytes = 10240 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.443334] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] quota.injected_file_path_length = 255 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.443547] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] quota.injected_files = 5 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.443728] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] quota.instances = 10 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.443900] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] quota.key_pairs = 100 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.444684] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] quota.metadata_items = 128 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.444684] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] quota.ram = 51200 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.444684] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] quota.recheck_quota = True {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.444684] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] quota.server_group_members = 10 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.444860] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] quota.server_groups = 10 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.444890] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=68443) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.445056] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.445228] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] scheduler.image_metadata_prefilter = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.445390] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.445582] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] scheduler.max_attempts = 3 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.445759] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] scheduler.max_placement_results = 1000 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.445923] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.446098] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] scheduler.query_placement_for_image_type_support = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.446265] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.446439] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] scheduler.workers = 2 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.446645] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.446824] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] filter_scheduler.aggregate_image_properties_isolation_separator = . 
{{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.447010] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.447188] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.447356] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.447523] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.447689] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.447878] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.448074] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] filter_scheduler.host_subset_size = 1 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.448228] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.448392] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] filter_scheduler.image_properties_default_architecture = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.448557] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.448725] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] filter_scheduler.isolated_hosts = [] {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.448889] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] filter_scheduler.isolated_images = [] {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.449069] env[68443]: DEBUG oslo_service.service [None 
req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] filter_scheduler.max_instances_per_host = 50 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.449241] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.449415] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.449612] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] filter_scheduler.pci_in_placement = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.449800] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.449971] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.450152] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.450320] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.450485] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.450653] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.450818] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] filter_scheduler.track_instance_changes = True {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.450997] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.451183] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] metrics.required = True {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.451351] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] metrics.weight_multiplier = 1.0 
{{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.451517] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] metrics.weight_of_unavailable = -10000.0 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.451684] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] metrics.weight_setting = [] {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.451998] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.452193] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] serial_console.enabled = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.452377] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] serial_console.port_range = 10000:20000 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.452553] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.452732] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.452899] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] serial_console.serialproxy_port = 6083 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.453083] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] service_user.auth_section = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.453264] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] service_user.auth_type = password {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.453449] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] service_user.cafile = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.453622] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] service_user.certfile = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.453806] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] service_user.collect_timing = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.454034] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] service_user.insecure = False {{(pid=68443) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.454213] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] service_user.keyfile = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.454389] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] service_user.send_service_user_token = True {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.454557] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] service_user.split_loggers = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.454722] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] service_user.timeout = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.454909] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] spice.agent_enabled = True {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.455088] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] spice.enabled = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.455409] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.455638] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] spice.html5proxy_host = 0.0.0.0 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.456219] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] spice.html5proxy_port = 6082 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.456219] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] spice.image_compression = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.456219] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] spice.jpeg_compression = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.456348] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] spice.playback_compression = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.456466] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] spice.server_listen = 127.0.0.1 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.456643] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=68443) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.456808] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] spice.streaming_mode = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.456968] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] spice.zlib_compression = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.457148] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] upgrade_levels.baseapi = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.457323] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] upgrade_levels.compute = auto {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.457486] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] upgrade_levels.conductor = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.457650] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] upgrade_levels.scheduler = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.457818] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vendordata_dynamic_auth.auth_section = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.457986] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vendordata_dynamic_auth.auth_type = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.458165] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vendordata_dynamic_auth.cafile = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.458322] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vendordata_dynamic_auth.certfile = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.458487] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vendordata_dynamic_auth.collect_timing = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.458651] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vendordata_dynamic_auth.insecure = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.458813] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vendordata_dynamic_auth.keyfile = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.458977] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vendordata_dynamic_auth.split_loggers = False {{(pid=68443) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.459151] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vendordata_dynamic_auth.timeout = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.459328] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vmware.api_retry_count = 10 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.459490] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vmware.ca_file = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.459664] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vmware.cache_prefix = devstack-image-cache {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.459833] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vmware.cluster_name = testcl1 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.459999] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vmware.connection_pool_size = 10 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.460175] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vmware.console_delay_seconds = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.460348] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vmware.datastore_regex = ^datastore.* {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.460557] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.460735] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vmware.host_password = **** {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.460904] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vmware.host_port = 443 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.461087] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vmware.host_username = administrator@vsphere.local {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.461260] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vmware.insecure = True {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.461422] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vmware.integration_bridge = None {{(pid=68443) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.461599] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vmware.maximum_objects = 100 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.461798] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vmware.pbm_default_policy = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.461965] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vmware.pbm_enabled = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.462141] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vmware.pbm_wsdl_location = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.462318] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.462482] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vmware.serial_port_proxy_uri = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.462643] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vmware.serial_port_service_uri = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.462812] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vmware.task_poll_interval = 0.5 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.462985] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vmware.use_linked_clone = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.463172] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vmware.vnc_keymap = en-us {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.463340] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vmware.vnc_port = 5900 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.463505] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vmware.vnc_port_total = 10000 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.463694] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vnc.auth_schemes = ['none'] {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.463872] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vnc.enabled = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.464187] env[68443]: 
DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.464380] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.464554] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vnc.novncproxy_port = 6080 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.464737] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vnc.server_listen = 127.0.0.1 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.464912] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.465087] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vnc.vencrypt_ca_certs = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.465254] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vnc.vencrypt_client_cert = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.465416] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vnc.vencrypt_client_key = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.465627] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.465803] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] workarounds.disable_deep_image_inspection = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.465968] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] workarounds.disable_fallback_pcpu_query = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.466146] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] workarounds.disable_group_policy_check_upcall = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.466309] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.466481] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] workarounds.disable_rootwrap = False {{(pid=68443) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.466664] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] workarounds.enable_numa_live_migration = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.466829] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.466993] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.467169] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] workarounds.handle_virt_lifecycle_events = True {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.467332] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] workarounds.libvirt_disable_apic = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.467494] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] workarounds.never_download_image_if_on_rbd = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.467659] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.467826] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.467991] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.468170] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.468335] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.468496] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.468660] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
661.468824] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.468990] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.469192] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.469367] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] wsgi.client_socket_timeout = 900 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.469536] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] wsgi.default_pool_size = 1000 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.469713] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] wsgi.keep_alive = True {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.469954] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] wsgi.max_header_line = 16384 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.470152] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] wsgi.secure_proxy_ssl_header = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.470322] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] wsgi.ssl_ca_file = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.470487] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] wsgi.ssl_cert_file = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.470652] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] wsgi.ssl_key_file = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.470821] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] wsgi.tcp_keepidle = 600 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.470998] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.471182] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] zvm.ca_file = None {{(pid=68443) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.471348] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] zvm.cloud_connector_url = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.471638] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.471817] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] zvm.reachable_timeout = 300 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.472006] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_policy.enforce_new_defaults = True {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.472190] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_policy.enforce_scope = True {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.472370] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_policy.policy_default_rule = default {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.472556] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.472735] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_policy.policy_file = policy.yaml {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.472907] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.473082] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.473250] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.473440] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.473628] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.473805] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] 
oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.473985] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.474180] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] profiler.connection_string = messaging:// {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.474650] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] profiler.enabled = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.474650] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] profiler.es_doc_type = notification {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.474748] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] profiler.es_scroll_size = 10000 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.474847] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] profiler.es_scroll_time = 2m {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.474992] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] profiler.filter_error_trace = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.475179] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] profiler.hmac_keys = **** {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.475350] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] profiler.sentinel_service_name = mymaster {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.475536] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] profiler.socket_timeout = 0.1 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.475713] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] profiler.trace_requests = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.475876] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] profiler.trace_sqlalchemy = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.476070] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] profiler_jaeger.process_tags = {} {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.476237] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] 
profiler_jaeger.service_name_prefix = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.476403] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] profiler_otlp.service_name_prefix = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.476572] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] remote_debug.host = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.476733] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] remote_debug.port = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.476915] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.477090] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.477260] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.477428] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.477593] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.477758] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.477920] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.478098] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.478269] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.478443] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_messaging_rabbit.hostname = devstack {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.478605] env[68443]: 
DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.478776] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.478944] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.479127] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.479300] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.479465] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.479630] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.479805] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.479968] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.480144] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.480311] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.480477] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.480642] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.480811] env[68443]: DEBUG oslo_service.service [None 
req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.480973] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.481152] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.481318] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.481479] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.481650] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.481817] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_messaging_rabbit.ssl = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.481991] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.482176] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.482341] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.482514] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.482692] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_messaging_rabbit.ssl_version = {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.482858] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.483055] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=68443) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.483228] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_messaging_notifications.retry = -1 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.483429] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.483623] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_messaging_notifications.transport_url = **** {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.483802] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_limit.auth_section = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.483972] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_limit.auth_type = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.484153] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_limit.cafile = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.484317] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_limit.certfile = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.484483] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_limit.collect_timing = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.484646] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_limit.connect_retries = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.484805] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_limit.connect_retry_delay = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.484968] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_limit.endpoint_id = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.485144] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_limit.endpoint_override = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.485312] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_limit.insecure = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.485510] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_limit.keyfile = None {{(pid=68443) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.485702] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_limit.max_version = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.485869] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_limit.min_version = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.486039] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_limit.region_name = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.486207] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_limit.retriable_status_codes = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.486368] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_limit.service_name = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.486531] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_limit.service_type = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.486698] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_limit.split_loggers = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.486857] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_limit.status_code_retries = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.487022] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_limit.status_code_retry_delay = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.487182] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_limit.timeout = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.487341] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_limit.valid_interfaces = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.487502] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_limit.version = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.487672] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_reports.file_event_handler = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.487841] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_reports.file_event_handler_interval = 1 {{(pid=68443) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.488009] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] oslo_reports.log_dir = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.488193] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.488360] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vif_plug_linux_bridge_privileged.group = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.488521] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.488690] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.488854] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.489023] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vif_plug_linux_bridge_privileged.user = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.489200] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.489362] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vif_plug_ovs_privileged.group = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.489524] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vif_plug_ovs_privileged.helper_command = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.489689] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.489849] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.490013] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] vif_plug_ovs_privileged.user = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.490385] env[68443]: DEBUG oslo_service.service 
[None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] os_vif_linux_bridge.flat_interface = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.490385] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.490557] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.490690] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.490861] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.491037] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.491210] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.491374] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] os_vif_linux_bridge.vlan_interface = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.491551] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.491722] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] os_vif_ovs.isolate_vif = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.491895] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.492071] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.492244] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.492414] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] os_vif_ovs.ovsdb_interface = native {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} 
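The long run of "group.option = value" lines above is oslo.config's start-up dump of every registered option, emitted by CONF.log_opt_values() when the service starts. A minimal, self-contained sketch of that mechanism, assuming a few hypothetical option definitions whose names and defaults are copied from the logged values (this is an illustration of the library call, not Nova's or os-vif's real option registration):

```python
# Sketch: register a handful of grouped options and dump their resolved values,
# which is what produces the "os_vif_ovs.... = ..." DEBUG lines in the log above.
import logging

from oslo_config import cfg

LOG = logging.getLogger(__name__)

CONF = cfg.CONF
CONF.register_opts(
    [
        cfg.StrOpt('ovsdb_connection', default='tcp:127.0.0.1:6640'),
        cfg.IntOpt('network_device_mtu', default=1500),
        cfg.IntOpt('ovs_vsctl_timeout', default=120),
        cfg.BoolOpt('per_port_bridge', default=False),
    ],
    group='os_vif_ovs',
)

if __name__ == '__main__':
    logging.basicConfig(level=logging.DEBUG)
    CONF([])                                   # parse an empty command line
    CONF.log_opt_values(LOG, logging.DEBUG)    # writes one line per option
```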
[ 661.492575] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] os_vif_ovs.per_port_bridge = False {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.492758] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] os_brick.lock_path = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.492914] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.493089] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] os_brick.wait_mpath_device_interval = 1 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.493261] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] privsep_osbrick.capabilities = [21] {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.493445] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] privsep_osbrick.group = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.493649] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] privsep_osbrick.helper_command = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.493827] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.493995] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] privsep_osbrick.thread_pool_size = 8 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.494169] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] privsep_osbrick.user = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.494345] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.494508] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] nova_sys_admin.group = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.494669] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] nova_sys_admin.helper_command = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.494835] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
661.495014] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] nova_sys_admin.thread_pool_size = 8 {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.495179] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] nova_sys_admin.user = None {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 661.495313] env[68443]: DEBUG oslo_service.service [None req-45724ff1-b2d6-4e23-bb23-35255b3dec79 None None] ******************************************************************************** {{(pid=68443) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2624}} [ 661.496097] env[68443]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 661.506180] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-fde7b526-9966-4877-b676-bb9bd27abf18 None None] Getting list of instances from cluster (obj){ [ 661.506180] env[68443]: value = "domain-c8" [ 661.506180] env[68443]: _type = "ClusterComputeResource" [ 661.506180] env[68443]: } {{(pid=68443) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 661.507436] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92b779f3-4527-4781-8185-4873e5f9c736 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.516667] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-fde7b526-9966-4877-b676-bb9bd27abf18 None None] Got total of 0 instances {{(pid=68443) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 661.517197] env[68443]: WARNING nova.virt.vmwareapi.driver [None req-fde7b526-9966-4877-b676-bb9bd27abf18 None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 661.517677] env[68443]: INFO nova.virt.node [None req-fde7b526-9966-4877-b676-bb9bd27abf18 None None] Generated node identity feda0f0b-e324-4b78-af74-5e6cfd355a37 [ 661.517911] env[68443]: INFO nova.virt.node [None req-fde7b526-9966-4877-b676-bb9bd27abf18 None None] Wrote node identity feda0f0b-e324-4b78-af74-5e6cfd355a37 to /opt/stack/data/n-cpu-1/compute_id [ 661.530881] env[68443]: WARNING nova.compute.manager [None req-fde7b526-9966-4877-b676-bb9bd27abf18 None None] Compute nodes ['feda0f0b-e324-4b78-af74-5e6cfd355a37'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 661.567147] env[68443]: INFO nova.compute.manager [None req-fde7b526-9966-4877-b676-bb9bd27abf18 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 661.594990] env[68443]: WARNING nova.compute.manager [None req-fde7b526-9966-4877-b676-bb9bd27abf18 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. 
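The "Acquiring lock … / Lock … acquired … waited / Lock … released … held" triplets that follow (and the earlier oslo_vmware_api_lock messages) are produced by oslo.concurrency's lockutils wrappers, which log the wait and hold times automatically. A minimal sketch of that pattern, using the lock name quoted in the log but otherwise hypothetical function names; it illustrates the library calls, not Nova's actual resource tracker code:

```python
# Sketch of the lockutils pattern behind the lock DEBUG lines: either decorate
# a function or take the same named lock explicitly as a context manager.
from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def update_available_resource():
    # critical section: only one caller in this process holds the lock at a time
    pass


def clean_compute_node_cache():
    with lockutils.lock('compute_resources'):
        # same named lock taken explicitly
        pass


if __name__ == '__main__':
    update_available_resource()
    clean_compute_node_cache()
```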
[ 661.595233] env[68443]: DEBUG oslo_concurrency.lockutils [None req-fde7b526-9966-4877-b676-bb9bd27abf18 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 661.595447] env[68443]: DEBUG oslo_concurrency.lockutils [None req-fde7b526-9966-4877-b676-bb9bd27abf18 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 661.595638] env[68443]: DEBUG oslo_concurrency.lockutils [None req-fde7b526-9966-4877-b676-bb9bd27abf18 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 661.595792] env[68443]: DEBUG nova.compute.resource_tracker [None req-fde7b526-9966-4877-b676-bb9bd27abf18 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68443) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 661.596888] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19612fcb-511e-49d3-9c68-185b23f7497b {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.605082] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5dabbe9-7d0a-4df5-95d1-70af148c8cb9 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.619372] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcd085fb-0298-4bf7-a3b4-54176e1b8c94 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.625757] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9372e4d-0d22-431e-a39f-6fd504e99691 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.656109] env[68443]: DEBUG nova.compute.resource_tracker [None req-fde7b526-9966-4877-b676-bb9bd27abf18 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181013MB free_disk=105GB free_vcpus=48 pci_devices=None {{(pid=68443) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 661.656298] env[68443]: DEBUG oslo_concurrency.lockutils [None req-fde7b526-9966-4877-b676-bb9bd27abf18 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 661.656411] env[68443]: DEBUG oslo_concurrency.lockutils [None req-fde7b526-9966-4877-b676-bb9bd27abf18 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 661.669717] env[68443]: WARNING 
nova.compute.resource_tracker [None req-fde7b526-9966-4877-b676-bb9bd27abf18 None None] No compute node record for cpu-1:feda0f0b-e324-4b78-af74-5e6cfd355a37: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host feda0f0b-e324-4b78-af74-5e6cfd355a37 could not be found. [ 661.684475] env[68443]: INFO nova.compute.resource_tracker [None req-fde7b526-9966-4877-b676-bb9bd27abf18 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: feda0f0b-e324-4b78-af74-5e6cfd355a37 [ 661.736469] env[68443]: DEBUG nova.compute.resource_tracker [None req-fde7b526-9966-4877-b676-bb9bd27abf18 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 661.736695] env[68443]: DEBUG nova.compute.resource_tracker [None req-fde7b526-9966-4877-b676-bb9bd27abf18 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 661.833745] env[68443]: INFO nova.scheduler.client.report [None req-fde7b526-9966-4877-b676-bb9bd27abf18 None None] [req-c33f6916-f94d-4267-9b4f-4a72cb949a9d] Created resource provider record via placement API for resource provider with UUID feda0f0b-e324-4b78-af74-5e6cfd355a37 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. [ 661.851078] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcb64890-98a6-4215-9848-52f95403d99c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.858887] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddd959e5-e92b-4efd-ae1f-6a8a00930679 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.890141] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34d68bd3-8c62-4984-a293-0338f756ec4c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.897516] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd535b7d-0132-4abb-95b1-5e88f51fa34d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.911480] env[68443]: DEBUG nova.compute.provider_tree [None req-fde7b526-9966-4877-b676-bb9bd27abf18 None None] Updating inventory in ProviderTree for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 661.952840] env[68443]: DEBUG nova.scheduler.client.report [None req-fde7b526-9966-4877-b676-bb9bd27abf18 None None] Updated inventory for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 
'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 661.953112] env[68443]: DEBUG nova.compute.provider_tree [None req-fde7b526-9966-4877-b676-bb9bd27abf18 None None] Updating resource provider feda0f0b-e324-4b78-af74-5e6cfd355a37 generation from 0 to 1 during operation: update_inventory {{(pid=68443) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 661.953264] env[68443]: DEBUG nova.compute.provider_tree [None req-fde7b526-9966-4877-b676-bb9bd27abf18 None None] Updating inventory in ProviderTree for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 662.001899] env[68443]: DEBUG nova.compute.provider_tree [None req-fde7b526-9966-4877-b676-bb9bd27abf18 None None] Updating resource provider feda0f0b-e324-4b78-af74-5e6cfd355a37 generation from 1 to 2 during operation: update_traits {{(pid=68443) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 662.019038] env[68443]: DEBUG nova.compute.resource_tracker [None req-fde7b526-9966-4877-b676-bb9bd27abf18 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68443) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 662.019240] env[68443]: DEBUG oslo_concurrency.lockutils [None req-fde7b526-9966-4877-b676-bb9bd27abf18 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.363s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 662.019406] env[68443]: DEBUG nova.service [None req-fde7b526-9966-4877-b676-bb9bd27abf18 None None] Creating RPC server for service compute {{(pid=68443) start /opt/stack/nova/nova/service.py:182}} [ 662.034858] env[68443]: DEBUG nova.service [None req-fde7b526-9966-4877-b676-bb9bd27abf18 None None] Join ServiceGroup membership for this service compute {{(pid=68443) start /opt/stack/nova/nova/service.py:199}} [ 662.035055] env[68443]: DEBUG nova.servicegroup.drivers.db [None req-fde7b526-9966-4877-b676-bb9bd27abf18 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=68443) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 671.331506] env[68443]: DEBUG dbcounter [-] [68443] Writing DB stats nova_cell0:SELECT=1 {{(pid=68443) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:115}} [ 671.332218] env[68443]: DEBUG dbcounter [-] [68443] Writing DB stats nova_cell1:SELECT=1 {{(pid=68443) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:115}} [ 676.037748] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] 
Running periodic task ComputeManager._sync_power_states {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 676.050162] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Getting list of instances from cluster (obj){ [ 676.050162] env[68443]: value = "domain-c8" [ 676.050162] env[68443]: _type = "ClusterComputeResource" [ 676.050162] env[68443]: } {{(pid=68443) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 676.051311] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94308b43-f25b-498d-b81c-27cdc0fabc2b {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.060306] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Got total of 0 instances {{(pid=68443) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 676.060531] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 676.060835] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Getting list of instances from cluster (obj){ [ 676.060835] env[68443]: value = "domain-c8" [ 676.060835] env[68443]: _type = "ClusterComputeResource" [ 676.060835] env[68443]: } {{(pid=68443) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 676.061669] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7b375d8-aa10-4271-9fa2-a3208902d17c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.068912] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Got total of 0 instances {{(pid=68443) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 706.390123] env[68443]: DEBUG oslo_concurrency.lockutils [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Acquiring lock "7dd8326b-2ccd-4c27-8fc4-fc7910042870" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 706.390417] env[68443]: DEBUG oslo_concurrency.lockutils [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Lock "7dd8326b-2ccd-4c27-8fc4-fc7910042870" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 706.409938] env[68443]: DEBUG nova.compute.manager [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] Starting instance... 
{{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 706.559777] env[68443]: DEBUG oslo_concurrency.lockutils [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 706.560102] env[68443]: DEBUG oslo_concurrency.lockutils [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 706.562152] env[68443]: INFO nova.compute.claims [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 706.725864] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-154bbff8-f30a-418e-9a73-3710fcdf0f6a {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.739852] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-962f289f-e82c-4e8a-acf7-ce1c822d12a9 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.786020] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd56c256-eb10-4c0d-9cdc-15fed20c3a7e {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.792503] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72389346-d512-42cd-b76e-06efcda95c2f {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.806442] env[68443]: DEBUG nova.compute.provider_tree [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 706.821682] env[68443]: DEBUG nova.scheduler.client.report [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 706.830577] env[68443]: DEBUG 
oslo_concurrency.lockutils [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Acquiring lock "bbf050f4-9cf2-49f7-984d-d140f7aac3f5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 706.830577] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Lock "bbf050f4-9cf2-49f7-984d-d140f7aac3f5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 706.838849] env[68443]: DEBUG oslo_concurrency.lockutils [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.278s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 706.839364] env[68443]: DEBUG nova.compute.manager [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] Start building networks asynchronously for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 706.844174] env[68443]: DEBUG nova.compute.manager [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 706.898901] env[68443]: DEBUG nova.compute.utils [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Using /dev/sd instead of None {{(pid=68443) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 706.902685] env[68443]: DEBUG nova.compute.manager [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] Allocating IP information in the background. {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 706.903031] env[68443]: DEBUG nova.network.neutron [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] allocate_for_instance() {{(pid=68443) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 706.924107] env[68443]: DEBUG nova.compute.manager [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] Start building block device mappings for instance. 
{{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 706.931923] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 706.932214] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 706.934608] env[68443]: INFO nova.compute.claims [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 707.064641] env[68443]: DEBUG nova.compute.manager [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] Start spawning the instance on the hypervisor. {{(pid=68443) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 707.123554] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16feeab1-b79e-4b38-b196-0f116e942116 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.139791] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a822fbd-0c40-4a62-92c2-640c5252e647 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.185452] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dac3b22-0297-4c50-b9a0-234b56720179 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.203021] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa16a2c2-ce88-4528-820e-31aecabae62a {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.224919] env[68443]: DEBUG nova.compute.provider_tree [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 707.254296] env[68443]: DEBUG nova.scheduler.client.report [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': 
{'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 707.283772] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.351s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 707.284441] env[68443]: DEBUG nova.compute.manager [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] Start building networks asynchronously for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 707.351428] env[68443]: DEBUG nova.compute.utils [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Using /dev/sd instead of None {{(pid=68443) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 707.354593] env[68443]: DEBUG nova.compute.manager [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] Not allocating networking since 'none' was specified. {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1968}} [ 707.367094] env[68443]: DEBUG nova.compute.manager [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] Start building block device mappings for instance. 
{{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 707.420703] env[68443]: DEBUG nova.virt.hardware [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-08T18:48:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-08T18:48:12Z,direct_url=,disk_format='vmdk',id=a80a8b97-4d56-4702-9b02-9d115fcd6710,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f4f2a1e220914ec3b281775c224df247',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-08T18:48:13Z,virtual_size=,visibility=), allow threads: False {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 707.420703] env[68443]: DEBUG nova.virt.hardware [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Flavor limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 707.420703] env[68443]: DEBUG nova.virt.hardware [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Image limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 707.421116] env[68443]: DEBUG nova.virt.hardware [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Flavor pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 707.421116] env[68443]: DEBUG nova.virt.hardware [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Image pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 707.421116] env[68443]: DEBUG nova.virt.hardware [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 707.421116] env[68443]: DEBUG nova.virt.hardware [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 707.421116] env[68443]: DEBUG nova.virt.hardware [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} 
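The nova.virt.hardware lines above trace the CPU-topology selection for the 1-vCPU m1.nano flavor: with no flavor or image limits set, the limits fall back to 65536 for sockets, cores, and threads, and the only topology whose product equals the vCPU count is 1 socket x 1 core x 1 thread. A simplified, hypothetical re-creation of that enumeration (not the actual _get_possible_cpu_topologies implementation) is sketched below; for vcpus=1 it reproduces the single topology the log reports.

```python
# Simplified illustration of the topology search logged above: enumerate
# (sockets, cores, threads) combinations whose product equals the vCPU count,
# capped by the (very large) default limits.
from typing import List, NamedTuple


class VirtCPUTopology(NamedTuple):
    sockets: int
    cores: int
    threads: int


def possible_topologies(vcpus: int,
                        max_sockets: int = 65536,
                        max_cores: int = 65536,
                        max_threads: int = 65536) -> List[VirtCPUTopology]:
    found = []
    for s in range(1, min(max_sockets, vcpus) + 1):
        for c in range(1, min(max_cores, vcpus) + 1):
            for t in range(1, min(max_threads, vcpus) + 1):
                if s * c * t == vcpus:
                    found.append(VirtCPUTopology(s, c, t))
    return found


print(possible_topologies(1))  # [VirtCPUTopology(sockets=1, cores=1, threads=1)]
```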
[ 707.422042] env[68443]: DEBUG nova.virt.hardware [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Got 1 possible topologies {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 707.422411] env[68443]: DEBUG nova.virt.hardware [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 707.422622] env[68443]: DEBUG nova.virt.hardware [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 707.423545] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf029e7f-bbba-4e0c-a337-1cc61ee23dfc {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.438989] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb164d54-82d4-4f46-ae58-0814ec351072 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.466786] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95844d78-03ac-4e16-b6aa-2d5ee38ea664 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.497742] env[68443]: DEBUG nova.compute.manager [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] Start spawning the instance on the hypervisor. {{(pid=68443) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 707.505027] env[68443]: DEBUG oslo_concurrency.lockutils [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Acquiring lock "3c9187ce-bad4-4634-bd67-7a3e7a4cacaf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 707.505742] env[68443]: DEBUG oslo_concurrency.lockutils [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Lock "3c9187ce-bad4-4634-bd67-7a3e7a4cacaf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 707.523352] env[68443]: DEBUG nova.compute.manager [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] Starting instance... 
{{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 707.538942] env[68443]: DEBUG nova.virt.hardware [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-08T18:48:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-08T18:48:12Z,direct_url=,disk_format='vmdk',id=a80a8b97-4d56-4702-9b02-9d115fcd6710,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f4f2a1e220914ec3b281775c224df247',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-08T18:48:13Z,virtual_size=,visibility=), allow threads: False {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 707.538942] env[68443]: DEBUG nova.virt.hardware [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Flavor limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 707.538942] env[68443]: DEBUG nova.virt.hardware [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Image limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 707.539188] env[68443]: DEBUG nova.virt.hardware [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Flavor pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 707.539188] env[68443]: DEBUG nova.virt.hardware [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Image pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 707.539188] env[68443]: DEBUG nova.virt.hardware [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 707.539590] env[68443]: DEBUG nova.virt.hardware [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 707.539590] env[68443]: DEBUG nova.virt.hardware [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 707.539723] 
env[68443]: DEBUG nova.virt.hardware [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Got 1 possible topologies {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 707.539893] env[68443]: DEBUG nova.virt.hardware [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 707.540082] env[68443]: DEBUG nova.virt.hardware [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 707.541340] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24fcb01c-06c4-4011-8978-5848c2f60644 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.557167] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab98c288-3b59-4a0f-b1bc-8fb9a18f44d9 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.581086] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] Instance VIF info [] {{(pid=68443) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 707.590653] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=68443) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 707.591631] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e9fdd88a-be87-4eb4-ac2c-50043676121d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.609831] env[68443]: INFO nova.virt.vmwareapi.vm_util [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Created folder: OpenStack in parent group-v4. [ 707.610573] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Creating folder: Project (6ca6c68ed61340669eeec0eab1afaa7d). Parent ref: group-v673136. 
{{(pid=68443) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 707.613017] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5fb0794b-c47e-4bf3-8580-c3a22325ca8e {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.615086] env[68443]: DEBUG oslo_concurrency.lockutils [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 707.615316] env[68443]: DEBUG oslo_concurrency.lockutils [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 707.616774] env[68443]: INFO nova.compute.claims [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 707.628694] env[68443]: INFO nova.virt.vmwareapi.vm_util [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Created folder: Project (6ca6c68ed61340669eeec0eab1afaa7d) in parent group-v673136. [ 707.628892] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Creating folder: Instances. Parent ref: group-v673137. {{(pid=68443) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 707.629159] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a277cfc5-b528-4802-8a43-9e5c8f7dac87 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.639070] env[68443]: INFO nova.virt.vmwareapi.vm_util [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Created folder: Instances in parent group-v673137. [ 707.639070] env[68443]: DEBUG oslo.service.loopingcall [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 707.639070] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] Creating VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 707.639599] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-08c7f403-44cf-413b-b95b-efeb8fb8d88b {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.659166] env[68443]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 707.659166] env[68443]: value = "task-3373877" [ 707.659166] env[68443]: _type = "Task" [ 707.659166] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.676412] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3373877, 'name': CreateVM_Task} progress is 6%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.751186] env[68443]: DEBUG oslo_concurrency.lockutils [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Acquiring lock "cd131349-f678-4271-af79-456624f090d1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 707.751427] env[68443]: DEBUG oslo_concurrency.lockutils [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Lock "cd131349-f678-4271-af79-456624f090d1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 707.776706] env[68443]: DEBUG nova.compute.manager [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] [instance: cd131349-f678-4271-af79-456624f090d1] Starting instance... 
{{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 707.828066] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c4ef62f-1865-4963-8cda-64c609830f94 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.841970] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-980dc2b3-ad07-4e37-a144-7da7080cb32f {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.858493] env[68443]: DEBUG nova.policy [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '12e52689684045b7a066b0e26dcfbaad', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2f89b76de8ed4ed3b757ff98465b51c8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68443) authorize /opt/stack/nova/nova/policy.py:203}} [ 707.861328] env[68443]: DEBUG oslo_concurrency.lockutils [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 707.890423] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f3efa20-90cf-4e2b-80b7-c1aac3168530 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.898117] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa1cbaa3-62b1-4929-99da-e258de02b870 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.912501] env[68443]: DEBUG nova.compute.provider_tree [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 707.923934] env[68443]: DEBUG nova.scheduler.client.report [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 707.944367] env[68443]: DEBUG oslo_concurrency.lockutils [None 
req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.327s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 707.944367] env[68443]: DEBUG nova.compute.manager [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] Start building networks asynchronously for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 707.945868] env[68443]: DEBUG oslo_concurrency.lockutils [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.085s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 707.947242] env[68443]: INFO nova.compute.claims [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] [instance: cd131349-f678-4271-af79-456624f090d1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 707.992544] env[68443]: DEBUG nova.compute.utils [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Using /dev/sd instead of None {{(pid=68443) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 707.994473] env[68443]: DEBUG nova.compute.manager [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] Allocating IP information in the background. {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 707.994822] env[68443]: DEBUG nova.network.neutron [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] allocate_for_instance() {{(pid=68443) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 708.010016] env[68443]: DEBUG nova.compute.manager [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] Start building block device mappings for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 708.108725] env[68443]: DEBUG nova.compute.manager [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] Start spawning the instance on the hypervisor. 
{{(pid=68443) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 708.122078] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f6d2769-dc3b-4400-9d6c-b49461b65481 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.139823] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74765b36-698d-42c5-a704-9444d9803475 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.170600] env[68443]: DEBUG nova.policy [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f30cdc895b2145839b59b9fdd6ea4178', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '29552b29778d4ab8837931ff7bfb502c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68443) authorize /opt/stack/nova/nova/policy.py:203}} [ 708.174058] env[68443]: DEBUG nova.virt.hardware [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-08T18:48:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-08T18:48:12Z,direct_url=,disk_format='vmdk',id=a80a8b97-4d56-4702-9b02-9d115fcd6710,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f4f2a1e220914ec3b281775c224df247',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-08T18:48:13Z,virtual_size=,visibility=), allow threads: False {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 708.174330] env[68443]: DEBUG nova.virt.hardware [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Flavor limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 708.174493] env[68443]: DEBUG nova.virt.hardware [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Image limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 708.174669] env[68443]: DEBUG nova.virt.hardware [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Flavor pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 708.174849] env[68443]: DEBUG nova.virt.hardware [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] 
Image pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 708.175330] env[68443]: DEBUG nova.virt.hardware [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 708.175330] env[68443]: DEBUG nova.virt.hardware [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 708.175330] env[68443]: DEBUG nova.virt.hardware [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 708.175477] env[68443]: DEBUG nova.virt.hardware [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Got 1 possible topologies {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 708.175634] env[68443]: DEBUG nova.virt.hardware [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 708.175797] env[68443]: DEBUG nova.virt.hardware [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 708.179529] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9204877-193c-4173-88ec-76b3d726d05d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.183265] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7d3eff7-5a09-4627-bda8-2fc7fabceceb {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.196218] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3373877, 'name': CreateVM_Task, 'duration_secs': 0.316009} completed successfully. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.196773] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] Created VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 708.197824] env[68443]: DEBUG oslo_vmware.service [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33ea85a9-3480-4543-b2b9-6004d9294091 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.208678] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b5e5aa6-a4f9-4b47-9b83-3d1740e7391c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.212267] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 708.212357] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 708.213216] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 708.214345] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc0b6a08-8049-4f17-9a30-487724e13066 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.218420] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-63dce01f-82bd-4421-aa3a-76f772785bef {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.230436] env[68443]: DEBUG nova.compute.provider_tree [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 708.247709] env[68443]: DEBUG oslo_vmware.api [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Waiting for the task: (returnval){ [ 708.247709] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]525a84f5-d56e-1aa7-9232-045e873844fa" [ 708.247709] env[68443]: _type = "Task" [ 
708.247709] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.260181] env[68443]: DEBUG nova.scheduler.client.report [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 708.265248] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 708.268785] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] Processing image a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 708.268785] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 708.268785] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 708.268785] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 708.268951] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5fd99dc2-0425-491b-9558-f74a925adff3 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.285284] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 
708.285474] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68443) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 708.286387] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0bdfd8a-b8e8-48dc-b279-3540dbe92599 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.295842] env[68443]: DEBUG oslo_concurrency.lockutils [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.348s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 708.295842] env[68443]: DEBUG nova.compute.manager [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] [instance: cd131349-f678-4271-af79-456624f090d1] Start building networks asynchronously for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 708.297557] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c37fe4b5-67a1-40d9-9121-57dd06b7402d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.307416] env[68443]: DEBUG oslo_vmware.api [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Waiting for the task: (returnval){ [ 708.307416] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]52cfec25-4e35-c8e9-7345-86073e3e5ae2" [ 708.307416] env[68443]: _type = "Task" [ 708.307416] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.317537] env[68443]: DEBUG oslo_vmware.api [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]52cfec25-4e35-c8e9-7345-86073e3e5ae2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.377278] env[68443]: DEBUG nova.compute.utils [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Using /dev/sd instead of None {{(pid=68443) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 708.378645] env[68443]: DEBUG nova.compute.manager [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] [instance: cd131349-f678-4271-af79-456624f090d1] Allocating IP information in the background. 
{{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 708.378810] env[68443]: DEBUG nova.network.neutron [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] [instance: cd131349-f678-4271-af79-456624f090d1] allocate_for_instance() {{(pid=68443) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 708.400725] env[68443]: DEBUG nova.compute.manager [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] [instance: cd131349-f678-4271-af79-456624f090d1] Start building block device mappings for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 708.523380] env[68443]: DEBUG nova.compute.manager [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] [instance: cd131349-f678-4271-af79-456624f090d1] Start spawning the instance on the hypervisor. {{(pid=68443) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 708.560768] env[68443]: DEBUG nova.virt.hardware [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-08T18:48:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-08T18:48:12Z,direct_url=,disk_format='vmdk',id=a80a8b97-4d56-4702-9b02-9d115fcd6710,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f4f2a1e220914ec3b281775c224df247',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-08T18:48:13Z,virtual_size=,visibility=), allow threads: False {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 708.560768] env[68443]: DEBUG nova.virt.hardware [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Flavor limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 708.560768] env[68443]: DEBUG nova.virt.hardware [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Image limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 708.560935] env[68443]: DEBUG nova.virt.hardware [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Flavor pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 708.560935] env[68443]: DEBUG nova.virt.hardware [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Image pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 708.560935] env[68443]: DEBUG nova.virt.hardware [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] 
Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 708.561879] env[68443]: DEBUG nova.virt.hardware [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 708.562094] env[68443]: DEBUG nova.virt.hardware [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 708.562286] env[68443]: DEBUG nova.virt.hardware [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Got 1 possible topologies {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 708.562448] env[68443]: DEBUG nova.virt.hardware [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 708.563027] env[68443]: DEBUG nova.virt.hardware [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 708.563909] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afea72b3-eded-4306-80a6-65dd209bd931 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.573151] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5c6b7b1-d52e-460c-80da-5ce12bc50350 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.752418] env[68443]: DEBUG nova.policy [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b05c1e0b902440cebcd5a4bb1f75a40e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9daabfb507b641d2b7291bc315ccd441', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68443) authorize /opt/stack/nova/nova/policy.py:203}} [ 708.821468] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] Preparing fetch location {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 708.821740] env[68443]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Creating directory with path [datastore1] vmware_temp/d8c118af-27b7-4137-81fc-c198199e9334/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 708.821981] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5149104b-12c9-4861-a932-af693fe276c1 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.846607] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Created directory with path [datastore1] vmware_temp/d8c118af-27b7-4137-81fc-c198199e9334/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 708.847365] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] Fetch image to [datastore1] vmware_temp/d8c118af-27b7-4137-81fc-c198199e9334/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 708.848100] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to [datastore1] vmware_temp/d8c118af-27b7-4137-81fc-c198199e9334/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 708.848499] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3db191df-5355-4272-a088-3d3c926f62a0 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.856234] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb25f6ff-c31b-44f3-96b5-ce176a73ad00 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.866833] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-748e3e05-d8a6-4097-9a60-ea004df28356 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.899216] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1b41ca5-1807-4768-960f-a0600e812ed2 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.905412] env[68443]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-9a5f6243-9ac5-4a43-b00a-4e47cc708d2e {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.937022] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b 
tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 709.050430] env[68443]: DEBUG oslo_vmware.rw_handles [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d8c118af-27b7-4137-81fc-c198199e9334/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68443) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 709.129954] env[68443]: DEBUG oslo_vmware.rw_handles [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Completed reading data from the image iterator. {{(pid=68443) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 709.130165] env[68443]: DEBUG oslo_vmware.rw_handles [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d8c118af-27b7-4137-81fc-c198199e9334/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68443) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 709.806122] env[68443]: DEBUG nova.network.neutron [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] Successfully created port: 0e5ab64e-378b-49b0-a751-77b709844233 {{(pid=68443) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 710.138456] env[68443]: DEBUG nova.network.neutron [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] Successfully created port: 5d55fbf5-7076-408b-b015-a3dcfd4e233c {{(pid=68443) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 710.275968] env[68443]: DEBUG nova.network.neutron [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] [instance: cd131349-f678-4271-af79-456624f090d1] Successfully created port: ad06fca1-4ba9-44ea-bc55-0b95f8da64bb {{(pid=68443) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 712.158121] env[68443]: DEBUG oslo_concurrency.lockutils [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Acquiring lock "b0882dec-0d2a-4f62-933d-0d24f3340026" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 712.158121] env[68443]: DEBUG oslo_concurrency.lockutils [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 
tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Lock "b0882dec-0d2a-4f62-933d-0d24f3340026" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 712.186135] env[68443]: DEBUG nova.compute.manager [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 712.273768] env[68443]: DEBUG oslo_concurrency.lockutils [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 712.274013] env[68443]: DEBUG oslo_concurrency.lockutils [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 712.275692] env[68443]: INFO nova.compute.claims [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 712.488474] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84d2fd6b-e8ab-4220-8a3f-292cfd11f985 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.498401] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25eadcca-4f6d-462a-8a4a-314cb8faee7a {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.538768] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-916a89b6-bd35-4ee4-b0a6-0e389850f7a7 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.549430] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36c804c6-16de-4512-bb63-bda6a7cc133d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.563815] env[68443]: DEBUG nova.compute.provider_tree [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 712.576139] env[68443]: DEBUG nova.scheduler.client.report [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 
tempest-ServerExternalEventsTest-229631243-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 712.600580] env[68443]: DEBUG oslo_concurrency.lockutils [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.326s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 712.603480] env[68443]: DEBUG nova.compute.manager [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Start building networks asynchronously for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 712.651398] env[68443]: DEBUG nova.compute.utils [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Using /dev/sd instead of None {{(pid=68443) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 712.653450] env[68443]: DEBUG nova.compute.manager [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Allocating IP information in the background. {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 712.653761] env[68443]: DEBUG nova.network.neutron [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] allocate_for_instance() {{(pid=68443) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 712.670536] env[68443]: DEBUG nova.compute.manager [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Start building block device mappings for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 712.759826] env[68443]: DEBUG nova.compute.manager [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Start spawning the instance on the hypervisor. 
{{(pid=68443) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 712.795972] env[68443]: DEBUG nova.virt.hardware [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-08T18:48:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-08T18:48:12Z,direct_url=,disk_format='vmdk',id=a80a8b97-4d56-4702-9b02-9d115fcd6710,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f4f2a1e220914ec3b281775c224df247',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-08T18:48:13Z,virtual_size=,visibility=), allow threads: False {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 712.796243] env[68443]: DEBUG nova.virt.hardware [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Flavor limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 712.796374] env[68443]: DEBUG nova.virt.hardware [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Image limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 712.796682] env[68443]: DEBUG nova.virt.hardware [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Flavor pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 712.796919] env[68443]: DEBUG nova.virt.hardware [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Image pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 712.797160] env[68443]: DEBUG nova.virt.hardware [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 712.797569] env[68443]: DEBUG nova.virt.hardware [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 712.798251] env[68443]: DEBUG nova.virt.hardware [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 712.798452] env[68443]: DEBUG 
nova.virt.hardware [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Got 1 possible topologies {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 712.798867] env[68443]: DEBUG nova.virt.hardware [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 712.798867] env[68443]: DEBUG nova.virt.hardware [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 712.800289] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-870d394a-2044-439d-ab8a-6ee4658d2673 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.810258] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cc6ec8e-fbe0-4f6f-a173-8aa394c5e73c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.972864] env[68443]: DEBUG nova.policy [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4160af900b38452190210da71c7a02c5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0e167b1788f3424fa03b032dbaf3c833', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68443) authorize /opt/stack/nova/nova/policy.py:203}} [ 713.337140] env[68443]: DEBUG nova.network.neutron [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] Successfully updated port: 0e5ab64e-378b-49b0-a751-77b709844233 {{(pid=68443) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 713.383419] env[68443]: DEBUG oslo_concurrency.lockutils [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Acquiring lock "refresh_cache-7dd8326b-2ccd-4c27-8fc4-fc7910042870" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 713.383581] env[68443]: DEBUG oslo_concurrency.lockutils [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Acquired lock "refresh_cache-7dd8326b-2ccd-4c27-8fc4-fc7910042870" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 713.384937] env[68443]: DEBUG nova.network.neutron [None req-72a48d7f-807d-452f-984c-f846956762d1 
tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] Building network info cache for instance {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 713.721617] env[68443]: DEBUG nova.network.neutron [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] Instance cache missing network info. {{(pid=68443) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 714.120024] env[68443]: DEBUG nova.network.neutron [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] [instance: cd131349-f678-4271-af79-456624f090d1] Successfully updated port: ad06fca1-4ba9-44ea-bc55-0b95f8da64bb {{(pid=68443) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 714.140302] env[68443]: DEBUG oslo_concurrency.lockutils [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Acquiring lock "refresh_cache-cd131349-f678-4271-af79-456624f090d1" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 714.140302] env[68443]: DEBUG oslo_concurrency.lockutils [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Acquired lock "refresh_cache-cd131349-f678-4271-af79-456624f090d1" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 714.140302] env[68443]: DEBUG nova.network.neutron [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] [instance: cd131349-f678-4271-af79-456624f090d1] Building network info cache for instance {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 714.346826] env[68443]: DEBUG nova.network.neutron [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] [instance: cd131349-f678-4271-af79-456624f090d1] Instance cache missing network info. 
{{(pid=68443) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 714.409696] env[68443]: DEBUG nova.network.neutron [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] Successfully updated port: 5d55fbf5-7076-408b-b015-a3dcfd4e233c {{(pid=68443) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 714.424470] env[68443]: DEBUG oslo_concurrency.lockutils [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Acquiring lock "refresh_cache-3c9187ce-bad4-4634-bd67-7a3e7a4cacaf" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 714.425375] env[68443]: DEBUG oslo_concurrency.lockutils [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Acquired lock "refresh_cache-3c9187ce-bad4-4634-bd67-7a3e7a4cacaf" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 714.425570] env[68443]: DEBUG nova.network.neutron [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] Building network info cache for instance {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 714.556633] env[68443]: DEBUG nova.network.neutron [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] Instance cache missing network info. 
{{(pid=68443) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 714.823858] env[68443]: DEBUG nova.network.neutron [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] Updating instance_info_cache with network_info: [{"id": "0e5ab64e-378b-49b0-a751-77b709844233", "address": "fa:16:3e:58:d5:d5", "network": {"id": "5875527f-2d57-4a67-a545-9086340a6dfe", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.171", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f4f2a1e220914ec3b281775c224df247", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "089a7624-43ba-4fce-bfc0-63e4bb7f9aeb", "external-id": "nsx-vlan-transportzone-218", "segmentation_id": 218, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0e5ab64e-37", "ovs_interfaceid": "0e5ab64e-378b-49b0-a751-77b709844233", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 714.847645] env[68443]: DEBUG oslo_concurrency.lockutils [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Releasing lock "refresh_cache-7dd8326b-2ccd-4c27-8fc4-fc7910042870" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 714.847645] env[68443]: DEBUG nova.compute.manager [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] Instance network_info: |[{"id": "0e5ab64e-378b-49b0-a751-77b709844233", "address": "fa:16:3e:58:d5:d5", "network": {"id": "5875527f-2d57-4a67-a545-9086340a6dfe", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.171", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f4f2a1e220914ec3b281775c224df247", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "089a7624-43ba-4fce-bfc0-63e4bb7f9aeb", "external-id": "nsx-vlan-transportzone-218", "segmentation_id": 218, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0e5ab64e-37", "ovs_interfaceid": "0e5ab64e-378b-49b0-a751-77b709844233", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 714.847859] env[68443]: DEBUG 
nova.virt.vmwareapi.vmops [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:58:d5:d5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '089a7624-43ba-4fce-bfc0-63e4bb7f9aeb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0e5ab64e-378b-49b0-a751-77b709844233', 'vif_model': 'vmxnet3'}] {{(pid=68443) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 714.857375] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Creating folder: Project (2f89b76de8ed4ed3b757ff98465b51c8). Parent ref: group-v673136. {{(pid=68443) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 714.858124] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6a6ff051-85aa-412f-a62e-0cc1192038c6 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.869707] env[68443]: INFO nova.virt.vmwareapi.vm_util [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Created folder: Project (2f89b76de8ed4ed3b757ff98465b51c8) in parent group-v673136. [ 714.869904] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Creating folder: Instances. Parent ref: group-v673140. {{(pid=68443) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 714.870249] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9c0b7c73-3cd9-454d-8097-cbdea34b258c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.880424] env[68443]: INFO nova.virt.vmwareapi.vm_util [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Created folder: Instances in parent group-v673140. [ 714.880669] env[68443]: DEBUG oslo.service.loopingcall [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 714.880860] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] Creating VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 714.881082] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a2ec1bd0-6903-4b8f-8d20-ab96c84bdc74 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.904051] env[68443]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 714.904051] env[68443]: value = "task-3373880" [ 714.904051] env[68443]: _type = "Task" [ 714.904051] env[68443]: } to complete. 
{{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.914945] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3373880, 'name': CreateVM_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.082086] env[68443]: DEBUG nova.network.neutron [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Successfully created port: 5c818b7a-f91d-489a-b8f6-4004766a7087 {{(pid=68443) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 715.094517] env[68443]: DEBUG nova.compute.manager [req-59a39ada-3884-4ff2-9713-f7f0795bb57a req-4d25b2d4-9448-4393-b9eb-23361962b815 service nova] [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] Received event network-vif-plugged-0e5ab64e-378b-49b0-a751-77b709844233 {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 715.094747] env[68443]: DEBUG oslo_concurrency.lockutils [req-59a39ada-3884-4ff2-9713-f7f0795bb57a req-4d25b2d4-9448-4393-b9eb-23361962b815 service nova] Acquiring lock "7dd8326b-2ccd-4c27-8fc4-fc7910042870-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 715.094989] env[68443]: DEBUG oslo_concurrency.lockutils [req-59a39ada-3884-4ff2-9713-f7f0795bb57a req-4d25b2d4-9448-4393-b9eb-23361962b815 service nova] Lock "7dd8326b-2ccd-4c27-8fc4-fc7910042870-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 715.095316] env[68443]: DEBUG oslo_concurrency.lockutils [req-59a39ada-3884-4ff2-9713-f7f0795bb57a req-4d25b2d4-9448-4393-b9eb-23361962b815 service nova] Lock "7dd8326b-2ccd-4c27-8fc4-fc7910042870-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 715.095316] env[68443]: DEBUG nova.compute.manager [req-59a39ada-3884-4ff2-9713-f7f0795bb57a req-4d25b2d4-9448-4393-b9eb-23361962b815 service nova] [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] No waiting events found dispatching network-vif-plugged-0e5ab64e-378b-49b0-a751-77b709844233 {{(pid=68443) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 715.095498] env[68443]: WARNING nova.compute.manager [req-59a39ada-3884-4ff2-9713-f7f0795bb57a req-4d25b2d4-9448-4393-b9eb-23361962b815 service nova] [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] Received unexpected event network-vif-plugged-0e5ab64e-378b-49b0-a751-77b709844233 for instance with vm_state building and task_state spawning. 
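The records above trace the CreateVM_Task flow: vm_util builds the VIF info and folder structure, Folder.CreateVM_Task is invoked through oslo.vmware, and wait_for_task/_poll_task then report progress until the task completes. Below is a minimal, self-contained Python sketch of that poll-until-done pattern; FakeTask, its info() shape, and the poll interval are illustrative stand-ins, not the oslo.vmware implementation.

import time

class FakeTask:
    """Stand-in for a vCenter Task handle; real code reads TaskInfo through the API."""
    def __init__(self, steps=3):
        self._steps = steps
        self._polls = 0

    def info(self):
        # Each poll advances progress; a real task reports running/success/error states.
        self._polls += 1
        progress = min(100, int(100 * self._polls / self._steps))
        state = 'success' if progress >= 100 else 'running'
        return {'state': state, 'progress': progress}

def wait_for_task(task, poll_interval=0.5):
    """Poll a task until it succeeds, mirroring the 'progress is N%' records above."""
    while True:
        info = task.info()
        print("Task progress is %d%%." % info['progress'])
        if info['state'] == 'success':
            return info
        if info['state'] == 'error':
            raise RuntimeError('task failed')
        time.sleep(poll_interval)

wait_for_task(FakeTask())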
[ 715.370689] env[68443]: DEBUG nova.network.neutron [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] [instance: cd131349-f678-4271-af79-456624f090d1] Updating instance_info_cache with network_info: [{"id": "ad06fca1-4ba9-44ea-bc55-0b95f8da64bb", "address": "fa:16:3e:42:5f:95", "network": {"id": "53a33681-df6c-4674-8eba-e80e4dbda85f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1008643964-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9daabfb507b641d2b7291bc315ccd441", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad06fca1-4b", "ovs_interfaceid": "ad06fca1-4ba9-44ea-bc55-0b95f8da64bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 715.394056] env[68443]: DEBUG oslo_concurrency.lockutils [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Releasing lock "refresh_cache-cd131349-f678-4271-af79-456624f090d1" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 715.394056] env[68443]: DEBUG nova.compute.manager [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] [instance: cd131349-f678-4271-af79-456624f090d1] Instance network_info: |[{"id": "ad06fca1-4ba9-44ea-bc55-0b95f8da64bb", "address": "fa:16:3e:42:5f:95", "network": {"id": "53a33681-df6c-4674-8eba-e80e4dbda85f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1008643964-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9daabfb507b641d2b7291bc315ccd441", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad06fca1-4b", "ovs_interfaceid": "ad06fca1-4ba9-44ea-bc55-0b95f8da64bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 715.394277] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c 
tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] [instance: cd131349-f678-4271-af79-456624f090d1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:42:5f:95', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa410d21-2141-45bb-8d0b-16c77304605f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ad06fca1-4ba9-44ea-bc55-0b95f8da64bb', 'vif_model': 'vmxnet3'}] {{(pid=68443) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 715.406546] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Creating folder: Project (9daabfb507b641d2b7291bc315ccd441). Parent ref: group-v673136. {{(pid=68443) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 715.406546] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8dc60f3f-119a-4b5c-a4b7-b8c26f7643d4 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.420026] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3373880, 'name': CreateVM_Task, 'duration_secs': 0.356815} completed successfully. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.420026] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] Created VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 715.425378] env[68443]: INFO nova.virt.vmwareapi.vm_util [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Created folder: Project (9daabfb507b641d2b7291bc315ccd441) in parent group-v673136. [ 715.425378] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Creating folder: Instances. Parent ref: group-v673143. {{(pid=68443) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 715.426063] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7791922c-2a00-47d1-a6da-86249a4285c4 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.437686] env[68443]: INFO nova.virt.vmwareapi.vm_util [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Created folder: Instances in parent group-v673143. [ 715.437686] env[68443]: DEBUG oslo.service.loopingcall [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 715.437686] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cd131349-f678-4271-af79-456624f090d1] Creating VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 715.437870] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-66afa620-5196-42f8-a521-b26c0fa6bd80 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.456475] env[68443]: DEBUG oslo_concurrency.lockutils [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 715.457092] env[68443]: DEBUG oslo_concurrency.lockutils [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 715.457092] env[68443]: DEBUG oslo_concurrency.lockutils [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 715.457796] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d62e16be-6980-478e-90dc-0ff1fc356e3f {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.462767] env[68443]: DEBUG oslo_vmware.api [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Waiting for the task: (returnval){ [ 715.462767] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]52dde5df-1c8d-285c-32c5-575e3c5ae709" [ 715.462767] env[68443]: _type = "Task" [ 715.462767] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.468236] env[68443]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 715.468236] env[68443]: value = "task-3373883" [ 715.468236] env[68443]: _type = "Task" [ 715.468236] env[68443]: } to complete. 
{{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.476740] env[68443]: DEBUG nova.network.neutron [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] Updating instance_info_cache with network_info: [{"id": "5d55fbf5-7076-408b-b015-a3dcfd4e233c", "address": "fa:16:3e:1e:11:21", "network": {"id": "5875527f-2d57-4a67-a545-9086340a6dfe", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.190", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f4f2a1e220914ec3b281775c224df247", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "089a7624-43ba-4fce-bfc0-63e4bb7f9aeb", "external-id": "nsx-vlan-transportzone-218", "segmentation_id": 218, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d55fbf5-70", "ovs_interfaceid": "5d55fbf5-7076-408b-b015-a3dcfd4e233c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 715.477496] env[68443]: DEBUG oslo_vmware.api [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]52dde5df-1c8d-285c-32c5-575e3c5ae709, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.483024] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3373883, 'name': CreateVM_Task} progress is 6%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.500034] env[68443]: DEBUG oslo_concurrency.lockutils [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Releasing lock "refresh_cache-3c9187ce-bad4-4634-bd67-7a3e7a4cacaf" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 715.500034] env[68443]: DEBUG nova.compute.manager [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] Instance network_info: |[{"id": "5d55fbf5-7076-408b-b015-a3dcfd4e233c", "address": "fa:16:3e:1e:11:21", "network": {"id": "5875527f-2d57-4a67-a545-9086340a6dfe", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.190", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f4f2a1e220914ec3b281775c224df247", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "089a7624-43ba-4fce-bfc0-63e4bb7f9aeb", "external-id": "nsx-vlan-transportzone-218", "segmentation_id": 218, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d55fbf5-70", "ovs_interfaceid": "5d55fbf5-7076-408b-b015-a3dcfd4e233c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 715.500246] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1e:11:21', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '089a7624-43ba-4fce-bfc0-63e4bb7f9aeb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5d55fbf5-7076-408b-b015-a3dcfd4e233c', 'vif_model': 'vmxnet3'}] {{(pid=68443) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 715.511584] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Creating folder: Project (29552b29778d4ab8837931ff7bfb502c). Parent ref: group-v673136. {{(pid=68443) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 715.511584] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6bc32841-a34b-4aac-a85a-bf0957aca533 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.523199] env[68443]: INFO nova.virt.vmwareapi.vm_util [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Created folder: Project (29552b29778d4ab8837931ff7bfb502c) in parent group-v673136. 
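The "Acquiring lock" / "Acquired lock ... waited" / "Releasing lock" records that run through this trace come from oslo.concurrency's lockutils. A minimal sketch of the two forms seen here follows: the lock() context manager (as used for the refresh_cache-<uuid> and image-cache names) and the synchronized() decorator (as used for "compute_resources"). The function bodies and the reused instance UUID are placeholders.

from oslo_concurrency import lockutils

# Context-manager form: serializes access to a named resource within the process,
# logging acquire/release much like the records above.
def refresh_cache(instance_uuid):
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass  # rebuild the instance network info cache here

# Decorator form: every call to the function competes for the same named lock,
# which is how the resource tracker guards "compute_resources".
@lockutils.synchronized('compute_resources')
def instance_claim():
    pass  # claim CPU/RAM/disk against the tracked totals here

refresh_cache('7dd8326b-2ccd-4c27-8fc4-fc7910042870')
instance_claim()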
[ 715.523458] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Creating folder: Instances. Parent ref: group-v673146. {{(pid=68443) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 715.523949] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d136d732-5a95-4013-8868-490c29dfe4d3 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.535829] env[68443]: INFO nova.virt.vmwareapi.vm_util [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Created folder: Instances in parent group-v673146. [ 715.536154] env[68443]: DEBUG oslo.service.loopingcall [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 715.536443] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] Creating VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 715.536669] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-aea37430-57bb-4e2b-92c4-edc1a41aed7b {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.557680] env[68443]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 715.557680] env[68443]: value = "task-3373886" [ 715.557680] env[68443]: _type = "Task" [ 715.557680] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.566936] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3373886, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.976548] env[68443]: DEBUG oslo_concurrency.lockutils [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 715.976816] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] Processing image a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 715.977127] env[68443]: DEBUG oslo_concurrency.lockutils [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 715.981132] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3373883, 'name': CreateVM_Task, 'duration_secs': 0.319299} completed successfully. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.981290] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cd131349-f678-4271-af79-456624f090d1] Created VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 715.981915] env[68443]: DEBUG oslo_concurrency.lockutils [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 715.982087] env[68443]: DEBUG oslo_concurrency.lockutils [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 715.982389] env[68443]: DEBUG oslo_concurrency.lockutils [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 715.982651] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4dd49726-35f6-4cd0-8d4c-ed3fdef012af {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.990726] env[68443]: DEBUG oslo_vmware.api [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Waiting for the task: (returnval){ [ 715.990726] env[68443]: value = 
"session[52c27adc-de12-9155-bd91-16aa298f9564]5297fe66-bf6c-5126-0895-bac17c9a4af9" [ 715.990726] env[68443]: _type = "Task" [ 715.990726] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.995910] env[68443]: DEBUG oslo_vmware.api [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]5297fe66-bf6c-5126-0895-bac17c9a4af9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.069979] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3373886, 'name': CreateVM_Task, 'duration_secs': 0.334424} completed successfully. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.070335] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] Created VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 716.071175] env[68443]: DEBUG oslo_concurrency.lockutils [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 716.501021] env[68443]: DEBUG oslo_concurrency.lockutils [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 716.501021] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] [instance: cd131349-f678-4271-af79-456624f090d1] Processing image a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 716.501021] env[68443]: DEBUG oslo_concurrency.lockutils [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 716.501021] env[68443]: DEBUG oslo_concurrency.lockutils [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 716.501424] env[68443]: DEBUG oslo_concurrency.lockutils [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 716.501424] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8faf84e9-4f91-4c75-a64b-59d858fce620 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.505587] env[68443]: DEBUG oslo_vmware.api [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Waiting for the task: (returnval){ [ 716.505587] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]529ab6a3-3e1d-6f56-8f36-ae9eca1d7fb5" [ 716.505587] env[68443]: _type = "Task" [ 716.505587] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.515421] env[68443]: DEBUG oslo_vmware.api [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]529ab6a3-3e1d-6f56-8f36-ae9eca1d7fb5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.019437] env[68443]: DEBUG oslo_concurrency.lockutils [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 717.019719] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] Processing image a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 717.019974] env[68443]: DEBUG oslo_concurrency.lockutils [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 717.384557] env[68443]: DEBUG oslo_concurrency.lockutils [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Acquiring lock "5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 717.384787] env[68443]: DEBUG oslo_concurrency.lockutils [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Lock "5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 717.405699] 
env[68443]: DEBUG nova.compute.manager [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 717.479324] env[68443]: DEBUG oslo_concurrency.lockutils [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 717.480192] env[68443]: DEBUG oslo_concurrency.lockutils [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 717.483470] env[68443]: INFO nova.compute.claims [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 717.666839] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e2bd66d-af84-476e-ba37-9ba4b3053e4f {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.678880] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f13fc63e-22c8-4cde-8790-8da1fdb2f9ba {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.721425] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4b33f67-2a7d-4f64-8b8f-76631a0be07f {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.729614] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74bf2fb0-c0a3-44f7-968a-88c554e645c4 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.744837] env[68443]: DEBUG nova.compute.provider_tree [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 717.758260] env[68443]: DEBUG nova.scheduler.client.report [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 717.776079] env[68443]: DEBUG oslo_concurrency.lockutils [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.296s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 717.776729] env[68443]: DEBUG nova.compute.manager [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Start building networks asynchronously for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 717.817493] env[68443]: DEBUG nova.compute.utils [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Using /dev/sd instead of None {{(pid=68443) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 717.818742] env[68443]: DEBUG nova.compute.manager [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Allocating IP information in the background. {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 717.818899] env[68443]: DEBUG nova.network.neutron [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] allocate_for_instance() {{(pid=68443) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 717.834033] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 717.834033] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 717.834174] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Starting heal instance info cache {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 717.834256] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Rebuilding the list of instances to heal {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 717.836457] env[68443]: DEBUG nova.compute.manager [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Start building block device mappings for instance. 
{{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 717.868454] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 717.868616] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 717.868748] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 717.868872] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: cd131349-f678-4271-af79-456624f090d1] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 717.868995] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 717.869629] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 717.869796] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Didn't find any instances for network info cache update. 
{{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 717.870869] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 717.875061] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 717.875962] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 717.876356] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 717.876652] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 717.876886] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 717.877092] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68443) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 717.877281] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 717.897578] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 717.897814] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 717.898129] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 717.898191] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68443) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 717.899771] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6671eef4-a4a4-4667-b463-adc414ad738d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.909634] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9970acc-0579-4856-bea1-64b9e12edeb1 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.929833] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-001fb2a4-e963-47bc-b095-529427a739a9 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.934312] env[68443]: DEBUG nova.compute.manager [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Start spawning the instance on the hypervisor. 
{{(pid=68443) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 717.940723] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f24b3f16-8f8a-4e9f-ae33-2c729664a0e3 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.971235] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181000MB free_disk=105GB free_vcpus=48 pci_devices=None {{(pid=68443) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 717.971402] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 717.971608] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 717.981974] env[68443]: DEBUG nova.virt.hardware [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-08T18:48:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-08T18:48:12Z,direct_url=,disk_format='vmdk',id=a80a8b97-4d56-4702-9b02-9d115fcd6710,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f4f2a1e220914ec3b281775c224df247',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-08T18:48:13Z,virtual_size=,visibility=), allow threads: False {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 717.985232] env[68443]: DEBUG nova.virt.hardware [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Flavor limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 717.985232] env[68443]: DEBUG nova.virt.hardware [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Image limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 717.985232] env[68443]: DEBUG nova.virt.hardware [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Flavor pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 717.985232] env[68443]: DEBUG nova.virt.hardware [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c 
tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Image pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 717.985232] env[68443]: DEBUG nova.virt.hardware [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 717.985464] env[68443]: DEBUG nova.virt.hardware [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 717.985464] env[68443]: DEBUG nova.virt.hardware [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 717.985464] env[68443]: DEBUG nova.virt.hardware [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Got 1 possible topologies {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 717.985464] env[68443]: DEBUG nova.virt.hardware [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 717.985464] env[68443]: DEBUG nova.virt.hardware [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 717.985616] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80440dbd-b6a0-4826-9422-2a7f3e169f31 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.992880] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-085fafd9-5bb3-43a1-932b-de3b2c02ab91 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.083510] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 7dd8326b-2ccd-4c27-8fc4-fc7910042870 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 718.083674] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance bbf050f4-9cf2-49f7-984d-d140f7aac3f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 718.083805] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 718.083927] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance cd131349-f678-4271-af79-456624f090d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 718.084057] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance b0882dec-0d2a-4f62-933d-0d24f3340026 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 718.084178] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 718.084382] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 718.084538] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 718.132751] env[68443]: DEBUG nova.policy [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '72ff88bcb8624617b34bbcb00c2ae63c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b1aaa71d80d348f2a2a5c36376ef0bcd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68443) authorize /opt/stack/nova/nova/policy.py:203}} [ 718.208061] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baac229b-4223-4e84-8fdb-1ba2679720dd {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.213982] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-600c3c0a-b17b-47cd-8306-3990f988a953 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.250298] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01054732-cb40-4663-acc0-c5be06c054ec {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.258748] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1531432-cb99-487c-944d-591084412674 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.276297] env[68443]: DEBUG nova.compute.provider_tree [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 718.297507] env[68443]: DEBUG nova.scheduler.client.report [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:954}} [ 718.317112] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68443) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 718.317281] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.346s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 718.410035] env[68443]: DEBUG nova.network.neutron [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Successfully updated port: 5c818b7a-f91d-489a-b8f6-4004766a7087 {{(pid=68443) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 718.426711] env[68443]: DEBUG oslo_concurrency.lockutils [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Acquiring lock "refresh_cache-b0882dec-0d2a-4f62-933d-0d24f3340026" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 718.426863] env[68443]: DEBUG oslo_concurrency.lockutils [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Acquired lock "refresh_cache-b0882dec-0d2a-4f62-933d-0d24f3340026" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 718.427026] env[68443]: DEBUG nova.network.neutron [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Building network info cache for instance {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 718.644034] env[68443]: DEBUG nova.network.neutron [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Instance cache missing network info. 
{{(pid=68443) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 719.098400] env[68443]: DEBUG nova.compute.manager [req-2766562c-13a5-41d3-96d1-8b37f59a571e req-e0015374-23de-4340-88be-64c0c0501a8b service nova] [instance: cd131349-f678-4271-af79-456624f090d1] Received event network-vif-plugged-ad06fca1-4ba9-44ea-bc55-0b95f8da64bb {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 719.098400] env[68443]: DEBUG oslo_concurrency.lockutils [req-2766562c-13a5-41d3-96d1-8b37f59a571e req-e0015374-23de-4340-88be-64c0c0501a8b service nova] Acquiring lock "cd131349-f678-4271-af79-456624f090d1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 719.098400] env[68443]: DEBUG oslo_concurrency.lockutils [req-2766562c-13a5-41d3-96d1-8b37f59a571e req-e0015374-23de-4340-88be-64c0c0501a8b service nova] Lock "cd131349-f678-4271-af79-456624f090d1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 719.098400] env[68443]: DEBUG oslo_concurrency.lockutils [req-2766562c-13a5-41d3-96d1-8b37f59a571e req-e0015374-23de-4340-88be-64c0c0501a8b service nova] Lock "cd131349-f678-4271-af79-456624f090d1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 719.099422] env[68443]: DEBUG nova.compute.manager [req-2766562c-13a5-41d3-96d1-8b37f59a571e req-e0015374-23de-4340-88be-64c0c0501a8b service nova] [instance: cd131349-f678-4271-af79-456624f090d1] No waiting events found dispatching network-vif-plugged-ad06fca1-4ba9-44ea-bc55-0b95f8da64bb {{(pid=68443) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 719.099634] env[68443]: WARNING nova.compute.manager [req-2766562c-13a5-41d3-96d1-8b37f59a571e req-e0015374-23de-4340-88be-64c0c0501a8b service nova] [instance: cd131349-f678-4271-af79-456624f090d1] Received unexpected event network-vif-plugged-ad06fca1-4ba9-44ea-bc55-0b95f8da64bb for instance with vm_state building and task_state spawning. 
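The "Acquiring lock ... acquired ... waited / released ... held" records above are produced by oslo.concurrency's named-lock wrapper around the per-instance event handling. As a minimal, self-contained sketch of that locking pattern (the lock name and handler below are illustrative only, not taken from the log):

from oslo_concurrency import lockutils

# Sketch of the pattern behind the "Acquiring lock ...", "acquired ... waited Ns"
# and "released ... held Ns" DEBUG records above. Lock name and function are
# placeholders, not the compute manager's actual code.
@lockutils.synchronized('demo-instance-events')
def pop_event_demo(event_name):
    # Runs with the named in-process lock held; the decorator's wrapper is what
    # emits the acquire/release DEBUG lines of the kind shown in the records.
    return event_name

# The same named lock can also be taken explicitly as a context manager.
with lockutils.lock('demo-instance-events'):
    pass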
[ 719.099810] env[68443]: DEBUG nova.compute.manager [req-2766562c-13a5-41d3-96d1-8b37f59a571e req-e0015374-23de-4340-88be-64c0c0501a8b service nova] [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] Received event network-vif-plugged-5d55fbf5-7076-408b-b015-a3dcfd4e233c {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 719.099976] env[68443]: DEBUG oslo_concurrency.lockutils [req-2766562c-13a5-41d3-96d1-8b37f59a571e req-e0015374-23de-4340-88be-64c0c0501a8b service nova] Acquiring lock "3c9187ce-bad4-4634-bd67-7a3e7a4cacaf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 719.100177] env[68443]: DEBUG oslo_concurrency.lockutils [req-2766562c-13a5-41d3-96d1-8b37f59a571e req-e0015374-23de-4340-88be-64c0c0501a8b service nova] Lock "3c9187ce-bad4-4634-bd67-7a3e7a4cacaf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 719.100332] env[68443]: DEBUG oslo_concurrency.lockutils [req-2766562c-13a5-41d3-96d1-8b37f59a571e req-e0015374-23de-4340-88be-64c0c0501a8b service nova] Lock "3c9187ce-bad4-4634-bd67-7a3e7a4cacaf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 719.100501] env[68443]: DEBUG nova.compute.manager [req-2766562c-13a5-41d3-96d1-8b37f59a571e req-e0015374-23de-4340-88be-64c0c0501a8b service nova] [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] No waiting events found dispatching network-vif-plugged-5d55fbf5-7076-408b-b015-a3dcfd4e233c {{(pid=68443) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 719.100655] env[68443]: WARNING nova.compute.manager [req-2766562c-13a5-41d3-96d1-8b37f59a571e req-e0015374-23de-4340-88be-64c0c0501a8b service nova] [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] Received unexpected event network-vif-plugged-5d55fbf5-7076-408b-b015-a3dcfd4e233c for instance with vm_state building and task_state spawning. [ 719.100809] env[68443]: DEBUG nova.compute.manager [req-2766562c-13a5-41d3-96d1-8b37f59a571e req-e0015374-23de-4340-88be-64c0c0501a8b service nova] [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] Received event network-changed-0e5ab64e-378b-49b0-a751-77b709844233 {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 719.100958] env[68443]: DEBUG nova.compute.manager [req-2766562c-13a5-41d3-96d1-8b37f59a571e req-e0015374-23de-4340-88be-64c0c0501a8b service nova] [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] Refreshing instance network info cache due to event network-changed-0e5ab64e-378b-49b0-a751-77b709844233. 
{{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 719.101142] env[68443]: DEBUG oslo_concurrency.lockutils [req-2766562c-13a5-41d3-96d1-8b37f59a571e req-e0015374-23de-4340-88be-64c0c0501a8b service nova] Acquiring lock "refresh_cache-7dd8326b-2ccd-4c27-8fc4-fc7910042870" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 719.101271] env[68443]: DEBUG oslo_concurrency.lockutils [req-2766562c-13a5-41d3-96d1-8b37f59a571e req-e0015374-23de-4340-88be-64c0c0501a8b service nova] Acquired lock "refresh_cache-7dd8326b-2ccd-4c27-8fc4-fc7910042870" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 719.101420] env[68443]: DEBUG nova.network.neutron [req-2766562c-13a5-41d3-96d1-8b37f59a571e req-e0015374-23de-4340-88be-64c0c0501a8b service nova] [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] Refreshing network info cache for port 0e5ab64e-378b-49b0-a751-77b709844233 {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 719.570802] env[68443]: DEBUG nova.network.neutron [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Updating instance_info_cache with network_info: [{"id": "5c818b7a-f91d-489a-b8f6-4004766a7087", "address": "fa:16:3e:2a:90:67", "network": {"id": "5875527f-2d57-4a67-a545-9086340a6dfe", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.242", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f4f2a1e220914ec3b281775c224df247", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "089a7624-43ba-4fce-bfc0-63e4bb7f9aeb", "external-id": "nsx-vlan-transportzone-218", "segmentation_id": 218, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c818b7a-f9", "ovs_interfaceid": "5c818b7a-f91d-489a-b8f6-4004766a7087", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 719.589032] env[68443]: DEBUG oslo_concurrency.lockutils [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Releasing lock "refresh_cache-b0882dec-0d2a-4f62-933d-0d24f3340026" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 719.589032] env[68443]: DEBUG nova.compute.manager [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Instance network_info: |[{"id": "5c818b7a-f91d-489a-b8f6-4004766a7087", "address": "fa:16:3e:2a:90:67", "network": {"id": "5875527f-2d57-4a67-a545-9086340a6dfe", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": 
"192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.242", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f4f2a1e220914ec3b281775c224df247", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "089a7624-43ba-4fce-bfc0-63e4bb7f9aeb", "external-id": "nsx-vlan-transportzone-218", "segmentation_id": 218, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c818b7a-f9", "ovs_interfaceid": "5c818b7a-f91d-489a-b8f6-4004766a7087", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 719.589316] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2a:90:67', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '089a7624-43ba-4fce-bfc0-63e4bb7f9aeb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5c818b7a-f91d-489a-b8f6-4004766a7087', 'vif_model': 'vmxnet3'}] {{(pid=68443) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 719.599157] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Creating folder: Project (0e167b1788f3424fa03b032dbaf3c833). Parent ref: group-v673136. {{(pid=68443) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 719.599807] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0f969ce3-6582-49bb-9b0a-f96c819b46e5 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.611769] env[68443]: INFO nova.virt.vmwareapi.vm_util [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Created folder: Project (0e167b1788f3424fa03b032dbaf3c833) in parent group-v673136. [ 719.611963] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Creating folder: Instances. Parent ref: group-v673149. {{(pid=68443) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 719.612374] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d2f1e34d-152f-4034-a681-a9094ce08790 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.625964] env[68443]: INFO nova.virt.vmwareapi.vm_util [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Created folder: Instances in parent group-v673149. 
[ 719.625964] env[68443]: DEBUG oslo.service.loopingcall [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 719.625964] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Creating VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 719.625964] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3725bf78-67ee-4f9b-bd58-bec92cab2a1a {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.653430] env[68443]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 719.653430] env[68443]: value = "task-3373889" [ 719.653430] env[68443]: _type = "Task" [ 719.653430] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.666102] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3373889, 'name': CreateVM_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.171695] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3373889, 'name': CreateVM_Task, 'duration_secs': 0.3533} completed successfully. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.171990] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Created VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 720.172764] env[68443]: DEBUG oslo_concurrency.lockutils [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 720.172935] env[68443]: DEBUG oslo_concurrency.lockutils [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 720.173288] env[68443]: DEBUG oslo_concurrency.lockutils [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 720.173542] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e8dd34a5-d9db-4f1b-bd22-21f0f0d069ee {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.181811] env[68443]: DEBUG oslo_vmware.api [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 
tempest-ServerExternalEventsTest-229631243-project-member] Waiting for the task: (returnval){ [ 720.181811] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]52d4fb32-3636-db0c-1940-beecd541cc66" [ 720.181811] env[68443]: _type = "Task" [ 720.181811] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.196346] env[68443]: DEBUG oslo_concurrency.lockutils [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 720.197051] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Processing image a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 720.197312] env[68443]: DEBUG oslo_concurrency.lockutils [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 720.368566] env[68443]: DEBUG nova.compute.manager [req-c6864c2b-1026-4bbd-9453-3f6ebea5ae4f req-4865f318-8d91-4e7d-af93-1d68a2781f93 service nova] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Received event network-vif-plugged-5c818b7a-f91d-489a-b8f6-4004766a7087 {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 720.368779] env[68443]: DEBUG oslo_concurrency.lockutils [req-c6864c2b-1026-4bbd-9453-3f6ebea5ae4f req-4865f318-8d91-4e7d-af93-1d68a2781f93 service nova] Acquiring lock "b0882dec-0d2a-4f62-933d-0d24f3340026-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 720.368981] env[68443]: DEBUG oslo_concurrency.lockutils [req-c6864c2b-1026-4bbd-9453-3f6ebea5ae4f req-4865f318-8d91-4e7d-af93-1d68a2781f93 service nova] Lock "b0882dec-0d2a-4f62-933d-0d24f3340026-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 720.369487] env[68443]: DEBUG oslo_concurrency.lockutils [req-c6864c2b-1026-4bbd-9453-3f6ebea5ae4f req-4865f318-8d91-4e7d-af93-1d68a2781f93 service nova] Lock "b0882dec-0d2a-4f62-933d-0d24f3340026-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 720.369693] env[68443]: DEBUG nova.compute.manager [req-c6864c2b-1026-4bbd-9453-3f6ebea5ae4f req-4865f318-8d91-4e7d-af93-1d68a2781f93 service nova] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] No waiting events found dispatching network-vif-plugged-5c818b7a-f91d-489a-b8f6-4004766a7087 {{(pid=68443) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:320}} [ 720.370350] env[68443]: WARNING nova.compute.manager [req-c6864c2b-1026-4bbd-9453-3f6ebea5ae4f req-4865f318-8d91-4e7d-af93-1d68a2781f93 service nova] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Received unexpected event network-vif-plugged-5c818b7a-f91d-489a-b8f6-4004766a7087 for instance with vm_state building and task_state spawning. [ 720.764742] env[68443]: DEBUG nova.network.neutron [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Successfully created port: 69dc6a0f-a3a3-43b5-a495-18d0880040f8 {{(pid=68443) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 720.801711] env[68443]: DEBUG nova.network.neutron [req-2766562c-13a5-41d3-96d1-8b37f59a571e req-e0015374-23de-4340-88be-64c0c0501a8b service nova] [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] Updated VIF entry in instance network info cache for port 0e5ab64e-378b-49b0-a751-77b709844233. {{(pid=68443) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 720.801711] env[68443]: DEBUG nova.network.neutron [req-2766562c-13a5-41d3-96d1-8b37f59a571e req-e0015374-23de-4340-88be-64c0c0501a8b service nova] [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] Updating instance_info_cache with network_info: [{"id": "0e5ab64e-378b-49b0-a751-77b709844233", "address": "fa:16:3e:58:d5:d5", "network": {"id": "5875527f-2d57-4a67-a545-9086340a6dfe", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.171", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f4f2a1e220914ec3b281775c224df247", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "089a7624-43ba-4fce-bfc0-63e4bb7f9aeb", "external-id": "nsx-vlan-transportzone-218", "segmentation_id": 218, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0e5ab64e-37", "ovs_interfaceid": "0e5ab64e-378b-49b0-a751-77b709844233", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 720.814612] env[68443]: DEBUG oslo_concurrency.lockutils [req-2766562c-13a5-41d3-96d1-8b37f59a571e req-e0015374-23de-4340-88be-64c0c0501a8b service nova] Releasing lock "refresh_cache-7dd8326b-2ccd-4c27-8fc4-fc7910042870" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 720.818247] env[68443]: DEBUG nova.compute.manager [req-2766562c-13a5-41d3-96d1-8b37f59a571e req-e0015374-23de-4340-88be-64c0c0501a8b service nova] [instance: cd131349-f678-4271-af79-456624f090d1] Received event network-changed-ad06fca1-4ba9-44ea-bc55-0b95f8da64bb {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 720.818247] env[68443]: DEBUG nova.compute.manager [req-2766562c-13a5-41d3-96d1-8b37f59a571e req-e0015374-23de-4340-88be-64c0c0501a8b service nova] [instance: cd131349-f678-4271-af79-456624f090d1] Refreshing 
instance network info cache due to event network-changed-ad06fca1-4ba9-44ea-bc55-0b95f8da64bb. {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 720.818247] env[68443]: DEBUG oslo_concurrency.lockutils [req-2766562c-13a5-41d3-96d1-8b37f59a571e req-e0015374-23de-4340-88be-64c0c0501a8b service nova] Acquiring lock "refresh_cache-cd131349-f678-4271-af79-456624f090d1" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 720.818247] env[68443]: DEBUG oslo_concurrency.lockutils [req-2766562c-13a5-41d3-96d1-8b37f59a571e req-e0015374-23de-4340-88be-64c0c0501a8b service nova] Acquired lock "refresh_cache-cd131349-f678-4271-af79-456624f090d1" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 720.818247] env[68443]: DEBUG nova.network.neutron [req-2766562c-13a5-41d3-96d1-8b37f59a571e req-e0015374-23de-4340-88be-64c0c0501a8b service nova] [instance: cd131349-f678-4271-af79-456624f090d1] Refreshing network info cache for port ad06fca1-4ba9-44ea-bc55-0b95f8da64bb {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 722.482417] env[68443]: DEBUG nova.network.neutron [req-2766562c-13a5-41d3-96d1-8b37f59a571e req-e0015374-23de-4340-88be-64c0c0501a8b service nova] [instance: cd131349-f678-4271-af79-456624f090d1] Updated VIF entry in instance network info cache for port ad06fca1-4ba9-44ea-bc55-0b95f8da64bb. {{(pid=68443) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 722.482835] env[68443]: DEBUG nova.network.neutron [req-2766562c-13a5-41d3-96d1-8b37f59a571e req-e0015374-23de-4340-88be-64c0c0501a8b service nova] [instance: cd131349-f678-4271-af79-456624f090d1] Updating instance_info_cache with network_info: [{"id": "ad06fca1-4ba9-44ea-bc55-0b95f8da64bb", "address": "fa:16:3e:42:5f:95", "network": {"id": "53a33681-df6c-4674-8eba-e80e4dbda85f", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1008643964-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9daabfb507b641d2b7291bc315ccd441", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad06fca1-4b", "ovs_interfaceid": "ad06fca1-4ba9-44ea-bc55-0b95f8da64bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 722.501484] env[68443]: DEBUG oslo_concurrency.lockutils [req-2766562c-13a5-41d3-96d1-8b37f59a571e req-e0015374-23de-4340-88be-64c0c0501a8b service nova] Releasing lock "refresh_cache-cd131349-f678-4271-af79-456624f090d1" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 722.501484] env[68443]: DEBUG nova.compute.manager [req-2766562c-13a5-41d3-96d1-8b37f59a571e 
req-e0015374-23de-4340-88be-64c0c0501a8b service nova] [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] Received event network-changed-5d55fbf5-7076-408b-b015-a3dcfd4e233c {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 722.501484] env[68443]: DEBUG nova.compute.manager [req-2766562c-13a5-41d3-96d1-8b37f59a571e req-e0015374-23de-4340-88be-64c0c0501a8b service nova] [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] Refreshing instance network info cache due to event network-changed-5d55fbf5-7076-408b-b015-a3dcfd4e233c. {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 722.501484] env[68443]: DEBUG oslo_concurrency.lockutils [req-2766562c-13a5-41d3-96d1-8b37f59a571e req-e0015374-23de-4340-88be-64c0c0501a8b service nova] Acquiring lock "refresh_cache-3c9187ce-bad4-4634-bd67-7a3e7a4cacaf" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 722.501484] env[68443]: DEBUG oslo_concurrency.lockutils [req-2766562c-13a5-41d3-96d1-8b37f59a571e req-e0015374-23de-4340-88be-64c0c0501a8b service nova] Acquired lock "refresh_cache-3c9187ce-bad4-4634-bd67-7a3e7a4cacaf" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 722.501859] env[68443]: DEBUG nova.network.neutron [req-2766562c-13a5-41d3-96d1-8b37f59a571e req-e0015374-23de-4340-88be-64c0c0501a8b service nova] [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] Refreshing network info cache for port 5d55fbf5-7076-408b-b015-a3dcfd4e233c {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 723.950758] env[68443]: DEBUG nova.network.neutron [req-2766562c-13a5-41d3-96d1-8b37f59a571e req-e0015374-23de-4340-88be-64c0c0501a8b service nova] [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] Updated VIF entry in instance network info cache for port 5d55fbf5-7076-408b-b015-a3dcfd4e233c. 
{{(pid=68443) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 723.951416] env[68443]: DEBUG nova.network.neutron [req-2766562c-13a5-41d3-96d1-8b37f59a571e req-e0015374-23de-4340-88be-64c0c0501a8b service nova] [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] Updating instance_info_cache with network_info: [{"id": "5d55fbf5-7076-408b-b015-a3dcfd4e233c", "address": "fa:16:3e:1e:11:21", "network": {"id": "5875527f-2d57-4a67-a545-9086340a6dfe", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.190", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f4f2a1e220914ec3b281775c224df247", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "089a7624-43ba-4fce-bfc0-63e4bb7f9aeb", "external-id": "nsx-vlan-transportzone-218", "segmentation_id": 218, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d55fbf5-70", "ovs_interfaceid": "5d55fbf5-7076-408b-b015-a3dcfd4e233c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 723.962494] env[68443]: DEBUG oslo_concurrency.lockutils [req-2766562c-13a5-41d3-96d1-8b37f59a571e req-e0015374-23de-4340-88be-64c0c0501a8b service nova] Releasing lock "refresh_cache-3c9187ce-bad4-4634-bd67-7a3e7a4cacaf" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 724.232837] env[68443]: DEBUG oslo_concurrency.lockutils [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Acquiring lock "f5aa2b1b-c290-42f2-84d3-272415184f14" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 724.233886] env[68443]: DEBUG oslo_concurrency.lockutils [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Lock "f5aa2b1b-c290-42f2-84d3-272415184f14" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 724.248407] env[68443]: DEBUG nova.compute.manager [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Starting instance... 
{{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 724.330790] env[68443]: DEBUG oslo_concurrency.lockutils [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 724.330790] env[68443]: DEBUG oslo_concurrency.lockutils [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 724.333112] env[68443]: INFO nova.compute.claims [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 724.349340] env[68443]: DEBUG nova.compute.manager [req-7d1b4f05-de88-44f1-9ffc-e98ff86d2fcb req-60e5c58f-00fe-4ca2-93dd-e0f4ce2c80c4 service nova] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Received event network-changed-5c818b7a-f91d-489a-b8f6-4004766a7087 {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 724.349586] env[68443]: DEBUG nova.compute.manager [req-7d1b4f05-de88-44f1-9ffc-e98ff86d2fcb req-60e5c58f-00fe-4ca2-93dd-e0f4ce2c80c4 service nova] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Refreshing instance network info cache due to event network-changed-5c818b7a-f91d-489a-b8f6-4004766a7087. 
{{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 724.349750] env[68443]: DEBUG oslo_concurrency.lockutils [req-7d1b4f05-de88-44f1-9ffc-e98ff86d2fcb req-60e5c58f-00fe-4ca2-93dd-e0f4ce2c80c4 service nova] Acquiring lock "refresh_cache-b0882dec-0d2a-4f62-933d-0d24f3340026" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 724.349891] env[68443]: DEBUG oslo_concurrency.lockutils [req-7d1b4f05-de88-44f1-9ffc-e98ff86d2fcb req-60e5c58f-00fe-4ca2-93dd-e0f4ce2c80c4 service nova] Acquired lock "refresh_cache-b0882dec-0d2a-4f62-933d-0d24f3340026" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 724.350290] env[68443]: DEBUG nova.network.neutron [req-7d1b4f05-de88-44f1-9ffc-e98ff86d2fcb req-60e5c58f-00fe-4ca2-93dd-e0f4ce2c80c4 service nova] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Refreshing network info cache for port 5c818b7a-f91d-489a-b8f6-4004766a7087 {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 724.362298] env[68443]: DEBUG nova.network.neutron [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Successfully updated port: 69dc6a0f-a3a3-43b5-a495-18d0880040f8 {{(pid=68443) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 724.398423] env[68443]: DEBUG oslo_concurrency.lockutils [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Acquiring lock "refresh_cache-5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 724.398544] env[68443]: DEBUG oslo_concurrency.lockutils [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Acquired lock "refresh_cache-5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 724.398611] env[68443]: DEBUG nova.network.neutron [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Building network info cache for instance {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 724.553495] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ababaa86-c0d9-4d37-8244-c12ce102a9cf {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.562785] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e0b914a-0343-4996-a9c5-9b5f40c751c2 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.595240] env[68443]: DEBUG nova.network.neutron [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Instance cache missing network info. 
{{(pid=68443) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 724.597683] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2969f05e-13a1-4950-9d19-959b3d0298e5 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.605465] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e94e381a-75a7-4fc8-994c-b7e9e75244de {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.619111] env[68443]: DEBUG nova.compute.provider_tree [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 724.629635] env[68443]: DEBUG nova.scheduler.client.report [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 724.646594] env[68443]: DEBUG oslo_concurrency.lockutils [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.316s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 724.647132] env[68443]: DEBUG nova.compute.manager [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Start building networks asynchronously for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 724.693789] env[68443]: DEBUG nova.compute.utils [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Using /dev/sd instead of None {{(pid=68443) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 724.696347] env[68443]: DEBUG nova.compute.manager [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Allocating IP information in the background. 
{{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 724.696520] env[68443]: DEBUG nova.network.neutron [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] allocate_for_instance() {{(pid=68443) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 724.705427] env[68443]: DEBUG nova.compute.manager [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Start building block device mappings for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 724.817695] env[68443]: DEBUG nova.compute.manager [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Start spawning the instance on the hypervisor. {{(pid=68443) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 724.844670] env[68443]: DEBUG nova.virt.hardware [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-08T18:48:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-08T18:48:12Z,direct_url=,disk_format='vmdk',id=a80a8b97-4d56-4702-9b02-9d115fcd6710,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f4f2a1e220914ec3b281775c224df247',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-08T18:48:13Z,virtual_size=,visibility=), allow threads: False {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 724.844763] env[68443]: DEBUG nova.virt.hardware [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Flavor limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 724.845281] env[68443]: DEBUG nova.virt.hardware [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Image limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 724.845281] env[68443]: DEBUG nova.virt.hardware [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Flavor pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 724.845281] env[68443]: DEBUG nova.virt.hardware [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Image pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 724.845439] env[68443]: DEBUG nova.virt.hardware [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 724.846059] env[68443]: DEBUG nova.virt.hardware [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 724.846059] env[68443]: DEBUG nova.virt.hardware [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 724.846059] env[68443]: DEBUG nova.virt.hardware [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Got 1 possible topologies {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 724.846209] env[68443]: DEBUG nova.virt.hardware [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 724.846241] env[68443]: DEBUG nova.virt.hardware [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 724.847632] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da893872-06a5-424f-ab0a-b7f895027ae0 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.859362] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-647a1823-adc2-4819-b6c4-44bd884e80ee {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.882856] env[68443]: DEBUG nova.policy [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '41d31fcc6a5242108c0538442ae070f2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0f4009e4cde244d28805f6935ba6e399', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68443) authorize /opt/stack/nova/nova/policy.py:203}} [ 
725.463757] env[68443]: DEBUG nova.network.neutron [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Updating instance_info_cache with network_info: [{"id": "69dc6a0f-a3a3-43b5-a495-18d0880040f8", "address": "fa:16:3e:74:12:74", "network": {"id": "5875527f-2d57-4a67-a545-9086340a6dfe", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.48", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f4f2a1e220914ec3b281775c224df247", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "089a7624-43ba-4fce-bfc0-63e4bb7f9aeb", "external-id": "nsx-vlan-transportzone-218", "segmentation_id": 218, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap69dc6a0f-a3", "ovs_interfaceid": "69dc6a0f-a3a3-43b5-a495-18d0880040f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 725.476992] env[68443]: DEBUG oslo_concurrency.lockutils [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Releasing lock "refresh_cache-5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 725.477335] env[68443]: DEBUG nova.compute.manager [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Instance network_info: |[{"id": "69dc6a0f-a3a3-43b5-a495-18d0880040f8", "address": "fa:16:3e:74:12:74", "network": {"id": "5875527f-2d57-4a67-a545-9086340a6dfe", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.48", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f4f2a1e220914ec3b281775c224df247", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "089a7624-43ba-4fce-bfc0-63e4bb7f9aeb", "external-id": "nsx-vlan-transportzone-218", "segmentation_id": 218, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap69dc6a0f-a3", "ovs_interfaceid": "69dc6a0f-a3a3-43b5-a495-18d0880040f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 725.477797] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c 
tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:74:12:74', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '089a7624-43ba-4fce-bfc0-63e4bb7f9aeb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '69dc6a0f-a3a3-43b5-a495-18d0880040f8', 'vif_model': 'vmxnet3'}] {{(pid=68443) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 725.487189] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Creating folder: Project (b1aaa71d80d348f2a2a5c36376ef0bcd). Parent ref: group-v673136. {{(pid=68443) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 725.487902] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c73f16b6-4b88-453e-acc3-f337d4193498 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.500184] env[68443]: INFO nova.virt.vmwareapi.vm_util [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Created folder: Project (b1aaa71d80d348f2a2a5c36376ef0bcd) in parent group-v673136. [ 725.500586] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Creating folder: Instances. Parent ref: group-v673152. {{(pid=68443) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 725.500698] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d5784feb-b888-43da-9141-51f3b5affa72 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.511270] env[68443]: INFO nova.virt.vmwareapi.vm_util [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Created folder: Instances in parent group-v673152. [ 725.511526] env[68443]: DEBUG oslo.service.loopingcall [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 725.511992] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Creating VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 725.511992] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ef008a99-511e-44d9-b39b-7c2a18e3580f {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.533858] env[68443]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 725.533858] env[68443]: value = "task-3373892" [ 725.533858] env[68443]: _type = "Task" [ 725.533858] env[68443]: } to complete. 
{{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.542703] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3373892, 'name': CreateVM_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.597281] env[68443]: DEBUG nova.network.neutron [req-7d1b4f05-de88-44f1-9ffc-e98ff86d2fcb req-60e5c58f-00fe-4ca2-93dd-e0f4ce2c80c4 service nova] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Updated VIF entry in instance network info cache for port 5c818b7a-f91d-489a-b8f6-4004766a7087. {{(pid=68443) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 725.597673] env[68443]: DEBUG nova.network.neutron [req-7d1b4f05-de88-44f1-9ffc-e98ff86d2fcb req-60e5c58f-00fe-4ca2-93dd-e0f4ce2c80c4 service nova] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Updating instance_info_cache with network_info: [{"id": "5c818b7a-f91d-489a-b8f6-4004766a7087", "address": "fa:16:3e:2a:90:67", "network": {"id": "5875527f-2d57-4a67-a545-9086340a6dfe", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.242", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f4f2a1e220914ec3b281775c224df247", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "089a7624-43ba-4fce-bfc0-63e4bb7f9aeb", "external-id": "nsx-vlan-transportzone-218", "segmentation_id": 218, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c818b7a-f9", "ovs_interfaceid": "5c818b7a-f91d-489a-b8f6-4004766a7087", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 725.609406] env[68443]: DEBUG oslo_concurrency.lockutils [req-7d1b4f05-de88-44f1-9ffc-e98ff86d2fcb req-60e5c58f-00fe-4ca2-93dd-e0f4ce2c80c4 service nova] Releasing lock "refresh_cache-b0882dec-0d2a-4f62-933d-0d24f3340026" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 726.047504] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3373892, 'name': CreateVM_Task, 'duration_secs': 0.363144} completed successfully. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.047983] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Created VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 726.048450] env[68443]: DEBUG oslo_concurrency.lockutils [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 726.048705] env[68443]: DEBUG oslo_concurrency.lockutils [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 726.048909] env[68443]: DEBUG oslo_concurrency.lockutils [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 726.049625] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-08eb97ae-5404-4114-9e94-1aee1df37a02 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.056025] env[68443]: DEBUG oslo_vmware.api [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Waiting for the task: (returnval){ [ 726.056025] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]52358790-5f90-79e0-433e-2b35ac54d718" [ 726.056025] env[68443]: _type = "Task" [ 726.056025] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.062885] env[68443]: DEBUG oslo_vmware.api [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]52358790-5f90-79e0-433e-2b35ac54d718, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.462512] env[68443]: DEBUG oslo_concurrency.lockutils [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Acquiring lock "280e1cc5-91db-4a03-bca4-b2d2e4ddd221" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 726.462833] env[68443]: DEBUG oslo_concurrency.lockutils [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Lock "280e1cc5-91db-4a03-bca4-b2d2e4ddd221" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 726.482819] env[68443]: DEBUG nova.compute.manager [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 726.544179] env[68443]: DEBUG oslo_concurrency.lockutils [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 726.546580] env[68443]: DEBUG oslo_concurrency.lockutils [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 726.548590] env[68443]: INFO nova.compute.claims [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 726.568939] env[68443]: DEBUG oslo_concurrency.lockutils [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 726.568939] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Processing image a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 726.569126] env[68443]: DEBUG oslo_concurrency.lockutils [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 
tempest-ServersAdminNegativeTestJSON-312218638-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 726.783058] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60c05b6b-fec4-4c71-acc0-08c53ccfe17f {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.791961] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c37ce6b3-a9a8-4a54-9fc2-4056b9d1ffec {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.832824] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d49ad0c5-5ae6-4461-a3ca-ec9ea33d05fb {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.841683] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e249d602-8187-4fee-81c4-5d3e4927eead {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.860972] env[68443]: DEBUG nova.compute.provider_tree [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 726.874572] env[68443]: DEBUG nova.scheduler.client.report [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 726.902535] env[68443]: DEBUG oslo_concurrency.lockutils [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.359s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 726.903060] env[68443]: DEBUG nova.compute.manager [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Start building networks asynchronously for instance. 
{{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 726.914065] env[68443]: DEBUG nova.network.neutron [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Successfully created port: 6499dafd-2149-4449-aca2-d40478ac9093 {{(pid=68443) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 726.932351] env[68443]: DEBUG nova.compute.manager [req-debd3a09-ec25-4696-997e-bca86e5237be req-f13e0910-c709-4202-8142-2a9f7edc947c service nova] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Received event network-vif-plugged-69dc6a0f-a3a3-43b5-a495-18d0880040f8 {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 726.932351] env[68443]: DEBUG oslo_concurrency.lockutils [req-debd3a09-ec25-4696-997e-bca86e5237be req-f13e0910-c709-4202-8142-2a9f7edc947c service nova] Acquiring lock "5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 726.932466] env[68443]: DEBUG oslo_concurrency.lockutils [req-debd3a09-ec25-4696-997e-bca86e5237be req-f13e0910-c709-4202-8142-2a9f7edc947c service nova] Lock "5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 726.932636] env[68443]: DEBUG oslo_concurrency.lockutils [req-debd3a09-ec25-4696-997e-bca86e5237be req-f13e0910-c709-4202-8142-2a9f7edc947c service nova] Lock "5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 726.932788] env[68443]: DEBUG nova.compute.manager [req-debd3a09-ec25-4696-997e-bca86e5237be req-f13e0910-c709-4202-8142-2a9f7edc947c service nova] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] No waiting events found dispatching network-vif-plugged-69dc6a0f-a3a3-43b5-a495-18d0880040f8 {{(pid=68443) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 726.932948] env[68443]: WARNING nova.compute.manager [req-debd3a09-ec25-4696-997e-bca86e5237be req-f13e0910-c709-4202-8142-2a9f7edc947c service nova] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Received unexpected event network-vif-plugged-69dc6a0f-a3a3-43b5-a495-18d0880040f8 for instance with vm_state building and task_state spawning. [ 726.951470] env[68443]: DEBUG nova.compute.utils [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Using /dev/sd instead of None {{(pid=68443) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 726.952979] env[68443]: DEBUG nova.compute.manager [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Allocating IP information in the background. 
{{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 726.954447] env[68443]: DEBUG nova.network.neutron [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] allocate_for_instance() {{(pid=68443) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 726.972390] env[68443]: DEBUG nova.compute.manager [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Start building block device mappings for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 727.085238] env[68443]: DEBUG nova.compute.manager [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Start spawning the instance on the hypervisor. {{(pid=68443) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 727.097579] env[68443]: DEBUG nova.policy [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f2b7c5f42cb549e8bb29d91d1a3f2e39', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3ccba298964a42b78913f8c837ddd032', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68443) authorize /opt/stack/nova/nova/policy.py:203}} [ 727.121267] env[68443]: DEBUG nova.virt.hardware [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-08T18:48:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-08T18:48:12Z,direct_url=,disk_format='vmdk',id=a80a8b97-4d56-4702-9b02-9d115fcd6710,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f4f2a1e220914ec3b281775c224df247',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-08T18:48:13Z,virtual_size=,visibility=), allow threads: False {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 727.121267] env[68443]: DEBUG nova.virt.hardware [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Flavor limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 727.121388] env[68443]: DEBUG nova.virt.hardware [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Image 
limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 727.121495] env[68443]: DEBUG nova.virt.hardware [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Flavor pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 727.121597] env[68443]: DEBUG nova.virt.hardware [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Image pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 727.121777] env[68443]: DEBUG nova.virt.hardware [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 727.122424] env[68443]: DEBUG nova.virt.hardware [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 727.126577] env[68443]: DEBUG nova.virt.hardware [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 727.126577] env[68443]: DEBUG nova.virt.hardware [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Got 1 possible topologies {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 727.126577] env[68443]: DEBUG nova.virt.hardware [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 727.126577] env[68443]: DEBUG nova.virt.hardware [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 727.126577] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76fe7eb0-2b6a-43e5-8b22-e302f4f9c706 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.136608] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55bed954-868c-48b2-b13f-9962410e7e20 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.200153] env[68443]: DEBUG nova.network.neutron [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 
tempest-ImagesOneServerTestJSON-1896759534-project-member] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Successfully created port: 8a8c420a-f505-4596-9dfb-b80c6c89de51 {{(pid=68443) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 729.885506] env[68443]: DEBUG nova.network.neutron [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Successfully updated port: 6499dafd-2149-4449-aca2-d40478ac9093 {{(pid=68443) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 729.900523] env[68443]: DEBUG oslo_concurrency.lockutils [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Acquiring lock "refresh_cache-f5aa2b1b-c290-42f2-84d3-272415184f14" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 729.900523] env[68443]: DEBUG oslo_concurrency.lockutils [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Acquired lock "refresh_cache-f5aa2b1b-c290-42f2-84d3-272415184f14" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 729.900523] env[68443]: DEBUG nova.network.neutron [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Building network info cache for instance {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 729.990307] env[68443]: DEBUG nova.network.neutron [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Instance cache missing network info. {{(pid=68443) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 730.617236] env[68443]: DEBUG oslo_concurrency.lockutils [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Acquiring lock "8fdbd88f-f608-4f26-9076-7d2f6eb67224" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 730.617236] env[68443]: DEBUG oslo_concurrency.lockutils [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Lock "8fdbd88f-f608-4f26-9076-7d2f6eb67224" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 730.628157] env[68443]: DEBUG nova.compute.manager [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Starting instance... 
{{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 730.635403] env[68443]: DEBUG nova.network.neutron [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Updating instance_info_cache with network_info: [{"id": "6499dafd-2149-4449-aca2-d40478ac9093", "address": "fa:16:3e:b7:86:30", "network": {"id": "42a8a94c-8105-467f-abe9-18bf06e5bc87", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1917813979-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0f4009e4cde244d28805f6935ba6e399", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6499dafd-21", "ovs_interfaceid": "6499dafd-2149-4449-aca2-d40478ac9093", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 730.659035] env[68443]: DEBUG oslo_concurrency.lockutils [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Releasing lock "refresh_cache-f5aa2b1b-c290-42f2-84d3-272415184f14" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 730.659270] env[68443]: DEBUG nova.compute.manager [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Instance network_info: |[{"id": "6499dafd-2149-4449-aca2-d40478ac9093", "address": "fa:16:3e:b7:86:30", "network": {"id": "42a8a94c-8105-467f-abe9-18bf06e5bc87", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1917813979-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0f4009e4cde244d28805f6935ba6e399", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6499dafd-21", "ovs_interfaceid": "6499dafd-2149-4449-aca2-d40478ac9093", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| 
{{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 730.659654] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b7:86:30', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '790c811b-3fa6-49f8-87ac-c51450911137', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6499dafd-2149-4449-aca2-d40478ac9093', 'vif_model': 'vmxnet3'}] {{(pid=68443) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 730.670306] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Creating folder: Project (0f4009e4cde244d28805f6935ba6e399). Parent ref: group-v673136. {{(pid=68443) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 730.671039] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cee7596d-4b1c-4c57-8da9-3a1e39898f63 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.689185] env[68443]: INFO nova.virt.vmwareapi.vm_util [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Created folder: Project (0f4009e4cde244d28805f6935ba6e399) in parent group-v673136. [ 730.689185] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Creating folder: Instances. Parent ref: group-v673155. 
{{(pid=68443) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 730.689185] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ce286ab3-66d1-470b-ac2c-2abbd851495d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.736026] env[68443]: DEBUG oslo_concurrency.lockutils [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 730.736155] env[68443]: DEBUG oslo_concurrency.lockutils [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 730.737743] env[68443]: INFO nova.compute.claims [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 730.975848] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a49e552c-bb74-425f-b772-c6d4b06f24a9 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.987558] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-709d9154-4d0d-4159-800b-7dc995cefd1c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.993616] env[68443]: INFO nova.virt.vmwareapi.vm_util [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Created folder: Instances in parent group-v673155. [ 730.993782] env[68443]: DEBUG oslo.service.loopingcall [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 730.994366] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Creating VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 730.994602] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-61a2ec8b-4afa-4de8-901d-157144b7a7f4 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.038487] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0f6e48d-c1fb-4b12-8371-7795bde18004 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.042572] env[68443]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 731.042572] env[68443]: value = "task-3373895" [ 731.042572] env[68443]: _type = "Task" [ 731.042572] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.050625] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce05f256-17a0-4bc7-a33b-a7774735aebc {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.057971] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3373895, 'name': CreateVM_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.068390] env[68443]: DEBUG nova.compute.provider_tree [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 731.079132] env[68443]: DEBUG nova.scheduler.client.report [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 731.117967] env[68443]: DEBUG oslo_concurrency.lockutils [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.381s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 731.118344] env[68443]: DEBUG nova.compute.manager [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] [instance: 
8fdbd88f-f608-4f26-9076-7d2f6eb67224] Start building networks asynchronously for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 731.167995] env[68443]: DEBUG nova.compute.utils [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Using /dev/sd instead of None {{(pid=68443) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 731.169386] env[68443]: DEBUG nova.compute.manager [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Allocating IP information in the background. {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 731.169704] env[68443]: DEBUG nova.network.neutron [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] allocate_for_instance() {{(pid=68443) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 731.188049] env[68443]: DEBUG nova.compute.manager [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Start building block device mappings for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 731.299532] env[68443]: DEBUG nova.compute.manager [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Start spawning the instance on the hypervisor. 
{{(pid=68443) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 731.341980] env[68443]: DEBUG nova.virt.hardware [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-08T18:50:20Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='1317626704',id=23,is_public=True,memory_mb=128,name='tempest-flavor_with_ephemeral_0-574923699',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-08T18:48:12Z,direct_url=,disk_format='vmdk',id=a80a8b97-4d56-4702-9b02-9d115fcd6710,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f4f2a1e220914ec3b281775c224df247',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-08T18:48:13Z,virtual_size=,visibility=), allow threads: False {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 731.342325] env[68443]: DEBUG nova.virt.hardware [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Flavor limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 731.342425] env[68443]: DEBUG nova.virt.hardware [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Image limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 731.343403] env[68443]: DEBUG nova.virt.hardware [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Flavor pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 731.343510] env[68443]: DEBUG nova.virt.hardware [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Image pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 731.343640] env[68443]: DEBUG nova.virt.hardware [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 731.343866] env[68443]: DEBUG nova.virt.hardware [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 731.344038] env[68443]: DEBUG nova.virt.hardware [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 
tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 731.344214] env[68443]: DEBUG nova.virt.hardware [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Got 1 possible topologies {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 731.344378] env[68443]: DEBUG nova.virt.hardware [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 731.344554] env[68443]: DEBUG nova.virt.hardware [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 731.345980] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f285fa16-75a8-4c84-aeea-9bd92e8683e2 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.361273] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5fe0363-7ffb-474d-b5fc-a1f6c8f20f0c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.556525] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3373895, 'name': CreateVM_Task, 'duration_secs': 0.408927} completed successfully. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.556855] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Created VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 731.557866] env[68443]: DEBUG oslo_concurrency.lockutils [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 731.558209] env[68443]: DEBUG oslo_concurrency.lockutils [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 731.558949] env[68443]: DEBUG oslo_concurrency.lockutils [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 731.559080] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eb30c249-12ff-48c7-936a-2c3b266518cb {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.566415] env[68443]: DEBUG oslo_vmware.api [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Waiting for the task: (returnval){ [ 731.566415] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]522e0423-5281-2b14-9fbc-fb4352db39c7" [ 731.566415] env[68443]: _type = "Task" [ 731.566415] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.580202] env[68443]: DEBUG oslo_vmware.api [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]522e0423-5281-2b14-9fbc-fb4352db39c7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.771335] env[68443]: DEBUG nova.policy [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4058239d9b1343c9a77765e1aa77cf56', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '34011520f33549df90bea3aa7c3b6ed4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68443) authorize /opt/stack/nova/nova/policy.py:203}} [ 732.078414] env[68443]: DEBUG oslo_concurrency.lockutils [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 732.078789] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Processing image a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 732.078894] env[68443]: DEBUG oslo_concurrency.lockutils [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 732.339299] env[68443]: DEBUG nova.network.neutron [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Successfully updated port: 8a8c420a-f505-4596-9dfb-b80c6c89de51 {{(pid=68443) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 732.353709] env[68443]: DEBUG oslo_concurrency.lockutils [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Acquiring lock "refresh_cache-280e1cc5-91db-4a03-bca4-b2d2e4ddd221" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 732.353858] env[68443]: DEBUG oslo_concurrency.lockutils [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Acquired lock "refresh_cache-280e1cc5-91db-4a03-bca4-b2d2e4ddd221" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 732.354032] env[68443]: DEBUG nova.network.neutron [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] 
Building network info cache for instance {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 732.510073] env[68443]: DEBUG nova.network.neutron [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Instance cache missing network info. {{(pid=68443) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 733.047312] env[68443]: DEBUG nova.compute.manager [req-fb7a8f47-3090-40f8-9da8-78b1177b4344 req-52cb7486-c940-453c-856a-d3a9baa5d569 service nova] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Received event network-changed-69dc6a0f-a3a3-43b5-a495-18d0880040f8 {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 733.047536] env[68443]: DEBUG nova.compute.manager [req-fb7a8f47-3090-40f8-9da8-78b1177b4344 req-52cb7486-c940-453c-856a-d3a9baa5d569 service nova] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Refreshing instance network info cache due to event network-changed-69dc6a0f-a3a3-43b5-a495-18d0880040f8. {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 733.047729] env[68443]: DEBUG oslo_concurrency.lockutils [req-fb7a8f47-3090-40f8-9da8-78b1177b4344 req-52cb7486-c940-453c-856a-d3a9baa5d569 service nova] Acquiring lock "refresh_cache-5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 733.047871] env[68443]: DEBUG oslo_concurrency.lockutils [req-fb7a8f47-3090-40f8-9da8-78b1177b4344 req-52cb7486-c940-453c-856a-d3a9baa5d569 service nova] Acquired lock "refresh_cache-5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 733.054398] env[68443]: DEBUG nova.network.neutron [req-fb7a8f47-3090-40f8-9da8-78b1177b4344 req-52cb7486-c940-453c-856a-d3a9baa5d569 service nova] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Refreshing network info cache for port 69dc6a0f-a3a3-43b5-a495-18d0880040f8 {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 733.125626] env[68443]: DEBUG nova.compute.manager [req-0059063a-4919-4160-86ed-7b0229382733 req-434b8920-24aa-4758-90c6-10da4ff36304 service nova] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Received event network-vif-plugged-6499dafd-2149-4449-aca2-d40478ac9093 {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 733.125893] env[68443]: DEBUG oslo_concurrency.lockutils [req-0059063a-4919-4160-86ed-7b0229382733 req-434b8920-24aa-4758-90c6-10da4ff36304 service nova] Acquiring lock "f5aa2b1b-c290-42f2-84d3-272415184f14-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 733.126060] env[68443]: DEBUG oslo_concurrency.lockutils [req-0059063a-4919-4160-86ed-7b0229382733 req-434b8920-24aa-4758-90c6-10da4ff36304 service nova] Lock "f5aa2b1b-c290-42f2-84d3-272415184f14-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 733.126414] env[68443]: DEBUG oslo_concurrency.lockutils [req-0059063a-4919-4160-86ed-7b0229382733 
req-434b8920-24aa-4758-90c6-10da4ff36304 service nova] Lock "f5aa2b1b-c290-42f2-84d3-272415184f14-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 733.126414] env[68443]: DEBUG nova.compute.manager [req-0059063a-4919-4160-86ed-7b0229382733 req-434b8920-24aa-4758-90c6-10da4ff36304 service nova] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] No waiting events found dispatching network-vif-plugged-6499dafd-2149-4449-aca2-d40478ac9093 {{(pid=68443) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 733.126548] env[68443]: WARNING nova.compute.manager [req-0059063a-4919-4160-86ed-7b0229382733 req-434b8920-24aa-4758-90c6-10da4ff36304 service nova] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Received unexpected event network-vif-plugged-6499dafd-2149-4449-aca2-d40478ac9093 for instance with vm_state building and task_state spawning. [ 733.275679] env[68443]: DEBUG nova.network.neutron [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Updating instance_info_cache with network_info: [{"id": "8a8c420a-f505-4596-9dfb-b80c6c89de51", "address": "fa:16:3e:a9:39:db", "network": {"id": "5fc0089e-7343-4462-9ee6-114ce0cf4f46", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-994007164-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ccba298964a42b78913f8c837ddd032", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8a8c420a-f5", "ovs_interfaceid": "8a8c420a-f505-4596-9dfb-b80c6c89de51", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 733.297746] env[68443]: DEBUG oslo_concurrency.lockutils [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Acquiring lock "08a980e1-ca8e-4af3-afbf-bd688e11259f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 733.298021] env[68443]: DEBUG oslo_concurrency.lockutils [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Lock "08a980e1-ca8e-4af3-afbf-bd688e11259f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 733.310202] env[68443]: DEBUG oslo_concurrency.lockutils [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Releasing lock "refresh_cache-280e1cc5-91db-4a03-bca4-b2d2e4ddd221" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 733.310915] env[68443]: DEBUG nova.compute.manager [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Instance network_info: |[{"id": "8a8c420a-f505-4596-9dfb-b80c6c89de51", "address": "fa:16:3e:a9:39:db", "network": {"id": "5fc0089e-7343-4462-9ee6-114ce0cf4f46", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-994007164-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ccba298964a42b78913f8c837ddd032", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8a8c420a-f5", "ovs_interfaceid": "8a8c420a-f505-4596-9dfb-b80c6c89de51", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 733.312470] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a9:39:db', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '08fb4857-7f9b-4f97-86ef-415341fb595d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8a8c420a-f505-4596-9dfb-b80c6c89de51', 'vif_model': 'vmxnet3'}] {{(pid=68443) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 733.322767] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Creating folder: Project (3ccba298964a42b78913f8c837ddd032). Parent ref: group-v673136. 
{{(pid=68443) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 733.324557] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6c6d00f4-86b6-4883-ba5c-6c951aeeef21 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.326715] env[68443]: DEBUG nova.compute.manager [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 733.342492] env[68443]: INFO nova.virt.vmwareapi.vm_util [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Created folder: Project (3ccba298964a42b78913f8c837ddd032) in parent group-v673136. [ 733.342492] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Creating folder: Instances. Parent ref: group-v673158. {{(pid=68443) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 733.345282] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ccd5b080-d17e-4905-b4f1-2331e1a6f59f {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.347934] env[68443]: DEBUG oslo_concurrency.lockutils [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Acquiring lock "63801b63-1601-4e77-a500-3569713177bd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 733.347934] env[68443]: DEBUG oslo_concurrency.lockutils [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Lock "63801b63-1601-4e77-a500-3569713177bd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 733.362612] env[68443]: INFO nova.virt.vmwareapi.vm_util [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Created folder: Instances in parent group-v673158. [ 733.362612] env[68443]: DEBUG oslo.service.loopingcall [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 733.362612] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Creating VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 733.362812] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1ad5665e-0b4d-4582-aa34-7149415e5178 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.393603] env[68443]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 733.393603] env[68443]: value = "task-3373898" [ 733.393603] env[68443]: _type = "Task" [ 733.393603] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.404135] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3373898, 'name': CreateVM_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.415139] env[68443]: DEBUG oslo_concurrency.lockutils [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 733.415435] env[68443]: DEBUG oslo_concurrency.lockutils [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 733.416993] env[68443]: INFO nova.compute.claims [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 733.683500] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a631994-38db-4d83-ad20-8cfccb81876e {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.691922] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c6e7a9f-6862-47af-8ce6-58b116a2702e {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.730676] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d8631da-e6f6-4223-a1d9-893d8d570280 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.738062] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c653f59c-ecb3-48d3-838a-4c0eac50fe36 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.752330] env[68443]: DEBUG nova.compute.provider_tree [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 
tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 733.762959] env[68443]: DEBUG nova.scheduler.client.report [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 733.785603] env[68443]: DEBUG oslo_concurrency.lockutils [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.370s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 733.786241] env[68443]: DEBUG nova.compute.manager [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Start building networks asynchronously for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 733.832756] env[68443]: DEBUG nova.compute.utils [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Using /dev/sd instead of None {{(pid=68443) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 733.833903] env[68443]: DEBUG nova.compute.manager [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Allocating IP information in the background. {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 733.834362] env[68443]: DEBUG nova.network.neutron [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] allocate_for_instance() {{(pid=68443) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 733.863300] env[68443]: DEBUG nova.compute.manager [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Start building block device mappings for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 733.919349] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3373898, 'name': CreateVM_Task, 'duration_secs': 0.490218} completed successfully. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.920185] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Created VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 733.920185] env[68443]: DEBUG oslo_concurrency.lockutils [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 733.920286] env[68443]: DEBUG oslo_concurrency.lockutils [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 733.920582] env[68443]: DEBUG oslo_concurrency.lockutils [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 733.920830] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-52f57303-386a-47bb-9bbd-88483a365bc0 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.925869] env[68443]: DEBUG oslo_vmware.api [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Waiting for the task: (returnval){ [ 733.925869] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]52422254-ef88-3b77-20b4-e9a92530f846" [ 733.925869] env[68443]: _type = "Task" [ 733.925869] env[68443]: } to complete. 
{{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.940034] env[68443]: DEBUG oslo_concurrency.lockutils [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 733.940299] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Processing image a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 733.940603] env[68443]: DEBUG oslo_concurrency.lockutils [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 733.957929] env[68443]: DEBUG nova.compute.manager [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Start spawning the instance on the hypervisor. {{(pid=68443) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 733.994866] env[68443]: DEBUG nova.virt.hardware [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-08T18:48:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-08T18:48:12Z,direct_url=,disk_format='vmdk',id=a80a8b97-4d56-4702-9b02-9d115fcd6710,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f4f2a1e220914ec3b281775c224df247',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-08T18:48:13Z,virtual_size=,visibility=), allow threads: False {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 733.995417] env[68443]: DEBUG nova.virt.hardware [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Flavor limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 733.995726] env[68443]: DEBUG nova.virt.hardware [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Image limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 733.996042] env[68443]: DEBUG nova.virt.hardware [None 
req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Flavor pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 734.000016] env[68443]: DEBUG nova.virt.hardware [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Image pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 734.000016] env[68443]: DEBUG nova.virt.hardware [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 734.000016] env[68443]: DEBUG nova.virt.hardware [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 734.000016] env[68443]: DEBUG nova.virt.hardware [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 734.000016] env[68443]: DEBUG nova.virt.hardware [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Got 1 possible topologies {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 734.000248] env[68443]: DEBUG nova.virt.hardware [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 734.000248] env[68443]: DEBUG nova.virt.hardware [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 734.000248] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af063c7b-46af-4b38-93b6-b8808ad346a2 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.006828] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-953eb36b-74a6-4c88-b99a-e94d284db163 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.040028] env[68443]: DEBUG nova.policy [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Policy check for 
network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f555aab89a4849eda865fa66f5bf9491', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cfe5754cc87443159f24335a1bce0c4c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68443) authorize /opt/stack/nova/nova/policy.py:203}} [ 734.307982] env[68443]: DEBUG nova.network.neutron [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Successfully created port: 82df3868-4dab-4503-bc7a-5eecd49a67b6 {{(pid=68443) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 734.409634] env[68443]: DEBUG nova.network.neutron [req-fb7a8f47-3090-40f8-9da8-78b1177b4344 req-52cb7486-c940-453c-856a-d3a9baa5d569 service nova] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Updated VIF entry in instance network info cache for port 69dc6a0f-a3a3-43b5-a495-18d0880040f8. {{(pid=68443) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 734.409788] env[68443]: DEBUG nova.network.neutron [req-fb7a8f47-3090-40f8-9da8-78b1177b4344 req-52cb7486-c940-453c-856a-d3a9baa5d569 service nova] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Updating instance_info_cache with network_info: [{"id": "69dc6a0f-a3a3-43b5-a495-18d0880040f8", "address": "fa:16:3e:74:12:74", "network": {"id": "5875527f-2d57-4a67-a545-9086340a6dfe", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.48", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f4f2a1e220914ec3b281775c224df247", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "089a7624-43ba-4fce-bfc0-63e4bb7f9aeb", "external-id": "nsx-vlan-transportzone-218", "segmentation_id": 218, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap69dc6a0f-a3", "ovs_interfaceid": "69dc6a0f-a3a3-43b5-a495-18d0880040f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 734.425080] env[68443]: DEBUG oslo_concurrency.lockutils [req-fb7a8f47-3090-40f8-9da8-78b1177b4344 req-52cb7486-c940-453c-856a-d3a9baa5d569 service nova] Releasing lock "refresh_cache-5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 735.304693] env[68443]: DEBUG nova.network.neutron [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Successfully created port: 6ecd54b1-edd3-4b8c-b0be-136ec65e0f60 {{(pid=68443) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 
737.990929] env[68443]: DEBUG nova.network.neutron [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Successfully updated port: 6ecd54b1-edd3-4b8c-b0be-136ec65e0f60 {{(pid=68443) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 738.005643] env[68443]: DEBUG oslo_concurrency.lockutils [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Acquiring lock "refresh_cache-08a980e1-ca8e-4af3-afbf-bd688e11259f" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 738.005643] env[68443]: DEBUG oslo_concurrency.lockutils [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Acquired lock "refresh_cache-08a980e1-ca8e-4af3-afbf-bd688e11259f" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 738.005643] env[68443]: DEBUG nova.network.neutron [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Building network info cache for instance {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 738.093762] env[68443]: DEBUG nova.network.neutron [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Instance cache missing network info. 
{{(pid=68443) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 738.691446] env[68443]: DEBUG nova.network.neutron [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Updating instance_info_cache with network_info: [{"id": "6ecd54b1-edd3-4b8c-b0be-136ec65e0f60", "address": "fa:16:3e:70:68:27", "network": {"id": "2929f4f5-4e27-42c8-b9d3-ac83ede3dc8b", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-478369306-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cfe5754cc87443159f24335a1bce0c4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55bd18a7-39a8-4d07-9088-9b944f9ff710", "external-id": "nsx-vlan-transportzone-686", "segmentation_id": 686, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ecd54b1-ed", "ovs_interfaceid": "6ecd54b1-edd3-4b8c-b0be-136ec65e0f60", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 738.710063] env[68443]: DEBUG oslo_concurrency.lockutils [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Releasing lock "refresh_cache-08a980e1-ca8e-4af3-afbf-bd688e11259f" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 738.710063] env[68443]: DEBUG nova.compute.manager [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Instance network_info: |[{"id": "6ecd54b1-edd3-4b8c-b0be-136ec65e0f60", "address": "fa:16:3e:70:68:27", "network": {"id": "2929f4f5-4e27-42c8-b9d3-ac83ede3dc8b", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-478369306-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cfe5754cc87443159f24335a1bce0c4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55bd18a7-39a8-4d07-9088-9b944f9ff710", "external-id": "nsx-vlan-transportzone-686", "segmentation_id": 686, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ecd54b1-ed", "ovs_interfaceid": "6ecd54b1-edd3-4b8c-b0be-136ec65e0f60", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68443) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 738.710293] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:70:68:27', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '55bd18a7-39a8-4d07-9088-9b944f9ff710', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6ecd54b1-edd3-4b8c-b0be-136ec65e0f60', 'vif_model': 'vmxnet3'}] {{(pid=68443) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 738.720463] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Creating folder: Project (cfe5754cc87443159f24335a1bce0c4c). Parent ref: group-v673136. {{(pid=68443) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 738.721454] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bbb9dc84-bd4b-41fb-8e58-349a0629d0d9 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.733540] env[68443]: INFO nova.virt.vmwareapi.vm_util [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Created folder: Project (cfe5754cc87443159f24335a1bce0c4c) in parent group-v673136. [ 738.736021] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Creating folder: Instances. Parent ref: group-v673161. {{(pid=68443) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 738.736021] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9d8eff5d-326e-4b8d-b8c2-b784c75b081a {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.746388] env[68443]: INFO nova.virt.vmwareapi.vm_util [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Created folder: Instances in parent group-v673161. [ 738.746388] env[68443]: DEBUG oslo.service.loopingcall [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 738.747021] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Creating VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 738.749011] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a4e023f0-9e84-4bff-851d-fc89feee3760 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.771389] env[68443]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 738.771389] env[68443]: value = "task-3373901" [ 738.771389] env[68443]: _type = "Task" [ 738.771389] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.779929] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3373901, 'name': CreateVM_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.944134] env[68443]: DEBUG nova.network.neutron [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Successfully updated port: 82df3868-4dab-4503-bc7a-5eecd49a67b6 {{(pid=68443) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 738.965734] env[68443]: DEBUG oslo_concurrency.lockutils [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Acquiring lock "refresh_cache-8fdbd88f-f608-4f26-9076-7d2f6eb67224" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 738.965734] env[68443]: DEBUG oslo_concurrency.lockutils [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Acquired lock "refresh_cache-8fdbd88f-f608-4f26-9076-7d2f6eb67224" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 738.965734] env[68443]: DEBUG nova.network.neutron [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Building network info cache for instance {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 739.218831] env[68443]: DEBUG nova.network.neutron [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Instance cache missing network info. {{(pid=68443) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 739.282176] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3373901, 'name': CreateVM_Task, 'duration_secs': 0.301442} completed successfully. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.282347] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Created VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 739.284030] env[68443]: DEBUG oslo_concurrency.lockutils [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 739.284030] env[68443]: DEBUG oslo_concurrency.lockutils [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 739.284030] env[68443]: DEBUG oslo_concurrency.lockutils [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 739.284030] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b01e01e-e307-408b-9594-8e846cbf97df {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.289133] env[68443]: DEBUG oslo_vmware.api [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Waiting for the task: (returnval){ [ 739.289133] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]520fde63-e96d-e735-963b-bef3c26f12dd" [ 739.289133] env[68443]: _type = "Task" [ 739.289133] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.301183] env[68443]: DEBUG oslo_vmware.api [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]520fde63-e96d-e735-963b-bef3c26f12dd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.614518] env[68443]: DEBUG nova.compute.manager [req-c892a857-a53c-49f7-af7f-cad1e5f21807 req-f3d3cb0e-464e-4c56-938f-eec4a1ea0ea1 service nova] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Received event network-changed-6499dafd-2149-4449-aca2-d40478ac9093 {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 739.614708] env[68443]: DEBUG nova.compute.manager [req-c892a857-a53c-49f7-af7f-cad1e5f21807 req-f3d3cb0e-464e-4c56-938f-eec4a1ea0ea1 service nova] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Refreshing instance network info cache due to event network-changed-6499dafd-2149-4449-aca2-d40478ac9093. 
{{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 739.614925] env[68443]: DEBUG oslo_concurrency.lockutils [req-c892a857-a53c-49f7-af7f-cad1e5f21807 req-f3d3cb0e-464e-4c56-938f-eec4a1ea0ea1 service nova] Acquiring lock "refresh_cache-f5aa2b1b-c290-42f2-84d3-272415184f14" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 739.616837] env[68443]: DEBUG oslo_concurrency.lockutils [req-c892a857-a53c-49f7-af7f-cad1e5f21807 req-f3d3cb0e-464e-4c56-938f-eec4a1ea0ea1 service nova] Acquired lock "refresh_cache-f5aa2b1b-c290-42f2-84d3-272415184f14" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 739.617211] env[68443]: DEBUG nova.network.neutron [req-c892a857-a53c-49f7-af7f-cad1e5f21807 req-f3d3cb0e-464e-4c56-938f-eec4a1ea0ea1 service nova] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Refreshing network info cache for port 6499dafd-2149-4449-aca2-d40478ac9093 {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 739.647531] env[68443]: DEBUG nova.compute.manager [req-dfbb48f8-663d-4c02-8102-cfb0bb0783c2 req-bc63cd61-09e1-4dab-950d-9fda44b5ff41 service nova] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Received event network-vif-plugged-8a8c420a-f505-4596-9dfb-b80c6c89de51 {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 739.647737] env[68443]: DEBUG oslo_concurrency.lockutils [req-dfbb48f8-663d-4c02-8102-cfb0bb0783c2 req-bc63cd61-09e1-4dab-950d-9fda44b5ff41 service nova] Acquiring lock "280e1cc5-91db-4a03-bca4-b2d2e4ddd221-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 739.647935] env[68443]: DEBUG oslo_concurrency.lockutils [req-dfbb48f8-663d-4c02-8102-cfb0bb0783c2 req-bc63cd61-09e1-4dab-950d-9fda44b5ff41 service nova] Lock "280e1cc5-91db-4a03-bca4-b2d2e4ddd221-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 739.648129] env[68443]: DEBUG oslo_concurrency.lockutils [req-dfbb48f8-663d-4c02-8102-cfb0bb0783c2 req-bc63cd61-09e1-4dab-950d-9fda44b5ff41 service nova] Lock "280e1cc5-91db-4a03-bca4-b2d2e4ddd221-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 739.648325] env[68443]: DEBUG nova.compute.manager [req-dfbb48f8-663d-4c02-8102-cfb0bb0783c2 req-bc63cd61-09e1-4dab-950d-9fda44b5ff41 service nova] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] No waiting events found dispatching network-vif-plugged-8a8c420a-f505-4596-9dfb-b80c6c89de51 {{(pid=68443) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 739.648493] env[68443]: WARNING nova.compute.manager [req-dfbb48f8-663d-4c02-8102-cfb0bb0783c2 req-bc63cd61-09e1-4dab-950d-9fda44b5ff41 service nova] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Received unexpected event network-vif-plugged-8a8c420a-f505-4596-9dfb-b80c6c89de51 for instance with vm_state building and task_state spawning. 
[ 739.648646] env[68443]: DEBUG nova.compute.manager [req-dfbb48f8-663d-4c02-8102-cfb0bb0783c2 req-bc63cd61-09e1-4dab-950d-9fda44b5ff41 service nova] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Received event network-changed-8a8c420a-f505-4596-9dfb-b80c6c89de51 {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 739.648793] env[68443]: DEBUG nova.compute.manager [req-dfbb48f8-663d-4c02-8102-cfb0bb0783c2 req-bc63cd61-09e1-4dab-950d-9fda44b5ff41 service nova] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Refreshing instance network info cache due to event network-changed-8a8c420a-f505-4596-9dfb-b80c6c89de51. {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 739.648965] env[68443]: DEBUG oslo_concurrency.lockutils [req-dfbb48f8-663d-4c02-8102-cfb0bb0783c2 req-bc63cd61-09e1-4dab-950d-9fda44b5ff41 service nova] Acquiring lock "refresh_cache-280e1cc5-91db-4a03-bca4-b2d2e4ddd221" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 739.649347] env[68443]: DEBUG oslo_concurrency.lockutils [req-dfbb48f8-663d-4c02-8102-cfb0bb0783c2 req-bc63cd61-09e1-4dab-950d-9fda44b5ff41 service nova] Acquired lock "refresh_cache-280e1cc5-91db-4a03-bca4-b2d2e4ddd221" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 739.649688] env[68443]: DEBUG nova.network.neutron [req-dfbb48f8-663d-4c02-8102-cfb0bb0783c2 req-bc63cd61-09e1-4dab-950d-9fda44b5ff41 service nova] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Refreshing network info cache for port 8a8c420a-f505-4596-9dfb-b80c6c89de51 {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 739.805689] env[68443]: DEBUG oslo_concurrency.lockutils [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 739.806051] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Processing image a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 739.806349] env[68443]: DEBUG oslo_concurrency.lockutils [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 739.915490] env[68443]: DEBUG oslo_concurrency.lockutils [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Acquiring lock "6df57929-1115-4080-8131-8960525eb833" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 739.915792] env[68443]: DEBUG 
oslo_concurrency.lockutils [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Lock "6df57929-1115-4080-8131-8960525eb833" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 740.440065] env[68443]: DEBUG nova.network.neutron [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Updating instance_info_cache with network_info: [{"id": "82df3868-4dab-4503-bc7a-5eecd49a67b6", "address": "fa:16:3e:8f:3c:04", "network": {"id": "6fb247e7-84f2-4cbf-894e-927bcf262e76", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-337888342-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34011520f33549df90bea3aa7c3b6ed4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap82df3868-4d", "ovs_interfaceid": "82df3868-4dab-4503-bc7a-5eecd49a67b6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.464385] env[68443]: DEBUG oslo_concurrency.lockutils [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Releasing lock "refresh_cache-8fdbd88f-f608-4f26-9076-7d2f6eb67224" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 740.465561] env[68443]: DEBUG nova.compute.manager [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Instance network_info: |[{"id": "82df3868-4dab-4503-bc7a-5eecd49a67b6", "address": "fa:16:3e:8f:3c:04", "network": {"id": "6fb247e7-84f2-4cbf-894e-927bcf262e76", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-337888342-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34011520f33549df90bea3aa7c3b6ed4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap82df3868-4d", "ovs_interfaceid": "82df3868-4dab-4503-bc7a-5eecd49a67b6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 740.466216] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8f:3c:04', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0cd5d325-3053-407e-a4ee-f627e82a23f9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '82df3868-4dab-4503-bc7a-5eecd49a67b6', 'vif_model': 'vmxnet3'}] {{(pid=68443) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 740.477018] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Creating folder: Project (34011520f33549df90bea3aa7c3b6ed4). Parent ref: group-v673136. {{(pid=68443) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 740.477694] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-881af634-dce0-4729-8284-bdbe30f0f75a {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.490954] env[68443]: INFO nova.virt.vmwareapi.vm_util [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Created folder: Project (34011520f33549df90bea3aa7c3b6ed4) in parent group-v673136. [ 740.490954] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Creating folder: Instances. Parent ref: group-v673164. {{(pid=68443) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 740.490954] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f8b01df4-6c4e-421b-b908-63f3d2c2965c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.500815] env[68443]: INFO nova.virt.vmwareapi.vm_util [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Created folder: Instances in parent group-v673164. [ 740.500973] env[68443]: DEBUG oslo.service.loopingcall [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 740.501184] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Creating VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 740.501398] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fded2f2c-8398-4f92-969b-10f4fa57e0a8 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.523994] env[68443]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 740.523994] env[68443]: value = "task-3373904" [ 740.523994] env[68443]: _type = "Task" [ 740.523994] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.531954] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3373904, 'name': CreateVM_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.797676] env[68443]: DEBUG nova.network.neutron [req-dfbb48f8-663d-4c02-8102-cfb0bb0783c2 req-bc63cd61-09e1-4dab-950d-9fda44b5ff41 service nova] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Updated VIF entry in instance network info cache for port 8a8c420a-f505-4596-9dfb-b80c6c89de51. {{(pid=68443) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 740.798055] env[68443]: DEBUG nova.network.neutron [req-dfbb48f8-663d-4c02-8102-cfb0bb0783c2 req-bc63cd61-09e1-4dab-950d-9fda44b5ff41 service nova] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Updating instance_info_cache with network_info: [{"id": "8a8c420a-f505-4596-9dfb-b80c6c89de51", "address": "fa:16:3e:a9:39:db", "network": {"id": "5fc0089e-7343-4462-9ee6-114ce0cf4f46", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-994007164-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ccba298964a42b78913f8c837ddd032", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "08fb4857-7f9b-4f97-86ef-415341fb595d", "external-id": "nsx-vlan-transportzone-579", "segmentation_id": 579, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8a8c420a-f5", "ovs_interfaceid": "8a8c420a-f505-4596-9dfb-b80c6c89de51", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.811487] env[68443]: DEBUG oslo_concurrency.lockutils [req-dfbb48f8-663d-4c02-8102-cfb0bb0783c2 req-bc63cd61-09e1-4dab-950d-9fda44b5ff41 service nova] Releasing lock "refresh_cache-280e1cc5-91db-4a03-bca4-b2d2e4ddd221" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 740.892550] env[68443]: DEBUG nova.network.neutron [req-c892a857-a53c-49f7-af7f-cad1e5f21807 req-f3d3cb0e-464e-4c56-938f-eec4a1ea0ea1 service nova] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] 
Updated VIF entry in instance network info cache for port 6499dafd-2149-4449-aca2-d40478ac9093. {{(pid=68443) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 740.892947] env[68443]: DEBUG nova.network.neutron [req-c892a857-a53c-49f7-af7f-cad1e5f21807 req-f3d3cb0e-464e-4c56-938f-eec4a1ea0ea1 service nova] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Updating instance_info_cache with network_info: [{"id": "6499dafd-2149-4449-aca2-d40478ac9093", "address": "fa:16:3e:b7:86:30", "network": {"id": "42a8a94c-8105-467f-abe9-18bf06e5bc87", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1917813979-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0f4009e4cde244d28805f6935ba6e399", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6499dafd-21", "ovs_interfaceid": "6499dafd-2149-4449-aca2-d40478ac9093", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.908894] env[68443]: DEBUG oslo_concurrency.lockutils [req-c892a857-a53c-49f7-af7f-cad1e5f21807 req-f3d3cb0e-464e-4c56-938f-eec4a1ea0ea1 service nova] Releasing lock "refresh_cache-f5aa2b1b-c290-42f2-84d3-272415184f14" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 741.035883] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3373904, 'name': CreateVM_Task, 'duration_secs': 0.327543} completed successfully. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.036278] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Created VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 741.037896] env[68443]: DEBUG oslo_concurrency.lockutils [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 741.038035] env[68443]: DEBUG oslo_concurrency.lockutils [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 741.042022] env[68443]: DEBUG oslo_concurrency.lockutils [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 741.042022] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d7f19394-cb5a-4947-b02f-eddac6083b1b {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.046311] env[68443]: DEBUG oslo_vmware.api [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Waiting for the task: (returnval){ [ 741.046311] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]5252ed77-ac18-e37b-89d2-ef3c0cf23144" [ 741.046311] env[68443]: _type = "Task" [ 741.046311] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.054596] env[68443]: DEBUG oslo_vmware.api [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]5252ed77-ac18-e37b-89d2-ef3c0cf23144, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.561330] env[68443]: DEBUG oslo_concurrency.lockutils [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 741.562751] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Processing image a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 741.562751] env[68443]: DEBUG oslo_concurrency.lockutils [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 741.915456] env[68443]: DEBUG oslo_concurrency.lockutils [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Acquiring lock "844f2b9d-ad2a-431a-a587-65ba446d571f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 741.916260] env[68443]: DEBUG oslo_concurrency.lockutils [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Lock "844f2b9d-ad2a-431a-a587-65ba446d571f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 743.566918] env[68443]: DEBUG oslo_concurrency.lockutils [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Acquiring lock "cee290e6-66e3-4d2e-a9bb-f93db33eaaaa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 743.567277] env[68443]: DEBUG oslo_concurrency.lockutils [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Lock "cee290e6-66e3-4d2e-a9bb-f93db33eaaaa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 744.057155] env[68443]: DEBUG nova.compute.manager [req-fabed38e-31b5-43e6-b539-610cd32cd7be req-c56a1533-f8d3-44cf-8467-60d30476d33e service nova] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Received event 
network-vif-plugged-82df3868-4dab-4503-bc7a-5eecd49a67b6 {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 744.057155] env[68443]: DEBUG oslo_concurrency.lockutils [req-fabed38e-31b5-43e6-b539-610cd32cd7be req-c56a1533-f8d3-44cf-8467-60d30476d33e service nova] Acquiring lock "8fdbd88f-f608-4f26-9076-7d2f6eb67224-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 744.057310] env[68443]: DEBUG oslo_concurrency.lockutils [req-fabed38e-31b5-43e6-b539-610cd32cd7be req-c56a1533-f8d3-44cf-8467-60d30476d33e service nova] Lock "8fdbd88f-f608-4f26-9076-7d2f6eb67224-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 744.057423] env[68443]: DEBUG oslo_concurrency.lockutils [req-fabed38e-31b5-43e6-b539-610cd32cd7be req-c56a1533-f8d3-44cf-8467-60d30476d33e service nova] Lock "8fdbd88f-f608-4f26-9076-7d2f6eb67224-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 744.057584] env[68443]: DEBUG nova.compute.manager [req-fabed38e-31b5-43e6-b539-610cd32cd7be req-c56a1533-f8d3-44cf-8467-60d30476d33e service nova] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] No waiting events found dispatching network-vif-plugged-82df3868-4dab-4503-bc7a-5eecd49a67b6 {{(pid=68443) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 744.057741] env[68443]: WARNING nova.compute.manager [req-fabed38e-31b5-43e6-b539-610cd32cd7be req-c56a1533-f8d3-44cf-8467-60d30476d33e service nova] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Received unexpected event network-vif-plugged-82df3868-4dab-4503-bc7a-5eecd49a67b6 for instance with vm_state building and task_state spawning. [ 744.057910] env[68443]: DEBUG nova.compute.manager [req-fabed38e-31b5-43e6-b539-610cd32cd7be req-c56a1533-f8d3-44cf-8467-60d30476d33e service nova] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Received event network-changed-82df3868-4dab-4503-bc7a-5eecd49a67b6 {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 744.058129] env[68443]: DEBUG nova.compute.manager [req-fabed38e-31b5-43e6-b539-610cd32cd7be req-c56a1533-f8d3-44cf-8467-60d30476d33e service nova] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Refreshing instance network info cache due to event network-changed-82df3868-4dab-4503-bc7a-5eecd49a67b6. 
{{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 744.058374] env[68443]: DEBUG oslo_concurrency.lockutils [req-fabed38e-31b5-43e6-b539-610cd32cd7be req-c56a1533-f8d3-44cf-8467-60d30476d33e service nova] Acquiring lock "refresh_cache-8fdbd88f-f608-4f26-9076-7d2f6eb67224" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 744.058763] env[68443]: DEBUG oslo_concurrency.lockutils [req-fabed38e-31b5-43e6-b539-610cd32cd7be req-c56a1533-f8d3-44cf-8467-60d30476d33e service nova] Acquired lock "refresh_cache-8fdbd88f-f608-4f26-9076-7d2f6eb67224" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 744.059160] env[68443]: DEBUG nova.network.neutron [req-fabed38e-31b5-43e6-b539-610cd32cd7be req-c56a1533-f8d3-44cf-8467-60d30476d33e service nova] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Refreshing network info cache for port 82df3868-4dab-4503-bc7a-5eecd49a67b6 {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 744.079732] env[68443]: DEBUG nova.compute.manager [req-42aae054-923a-4a49-a104-c8fa9473aa5d req-9fd310cc-e3ce-4db4-9db3-0d93c6d1d9a8 service nova] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Received event network-vif-plugged-6ecd54b1-edd3-4b8c-b0be-136ec65e0f60 {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 744.079964] env[68443]: DEBUG oslo_concurrency.lockutils [req-42aae054-923a-4a49-a104-c8fa9473aa5d req-9fd310cc-e3ce-4db4-9db3-0d93c6d1d9a8 service nova] Acquiring lock "08a980e1-ca8e-4af3-afbf-bd688e11259f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 744.080180] env[68443]: DEBUG oslo_concurrency.lockutils [req-42aae054-923a-4a49-a104-c8fa9473aa5d req-9fd310cc-e3ce-4db4-9db3-0d93c6d1d9a8 service nova] Lock "08a980e1-ca8e-4af3-afbf-bd688e11259f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 744.080344] env[68443]: DEBUG oslo_concurrency.lockutils [req-42aae054-923a-4a49-a104-c8fa9473aa5d req-9fd310cc-e3ce-4db4-9db3-0d93c6d1d9a8 service nova] Lock "08a980e1-ca8e-4af3-afbf-bd688e11259f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 744.080514] env[68443]: DEBUG nova.compute.manager [req-42aae054-923a-4a49-a104-c8fa9473aa5d req-9fd310cc-e3ce-4db4-9db3-0d93c6d1d9a8 service nova] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] No waiting events found dispatching network-vif-plugged-6ecd54b1-edd3-4b8c-b0be-136ec65e0f60 {{(pid=68443) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 744.080676] env[68443]: WARNING nova.compute.manager [req-42aae054-923a-4a49-a104-c8fa9473aa5d req-9fd310cc-e3ce-4db4-9db3-0d93c6d1d9a8 service nova] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Received unexpected event network-vif-plugged-6ecd54b1-edd3-4b8c-b0be-136ec65e0f60 for instance with vm_state building and task_state spawning. 
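The two request contexts above follow the same external-event pattern: nova-compute receives a network-vif-plugged / network-changed event from Neutron, takes the per-instance "<uuid>-events" lock to pop any waiter registered for that event, logs a WARNING when nothing was waiting (the instance is still building/spawning), and then refreshes the instance's network info cache under the "refresh_cache-<uuid>" lock. The following is a minimal sketch of that register/pop bookkeeping using oslo_concurrency.lockutils and threading primitives; it is illustrative only and not Nova's actual InstanceEvents implementation (the class and method names here are invented for the sketch).

    # Simplified sketch of the "expected event" bookkeeping suggested by the
    # lock lines above; NOT nova.compute.manager.InstanceEvents.
    import threading

    from oslo_concurrency import lockutils


    class SimpleInstanceEvents(object):
        def __init__(self):
            # {instance_uuid: {event_name: threading.Event}}
            self._events = {}

        def prepare_event(self, instance_uuid, event_name):
            """Register interest in an event before triggering it externally."""
            with lockutils.lock('%s-events' % instance_uuid):
                ev = threading.Event()
                self._events.setdefault(instance_uuid, {})[event_name] = ev
                return ev

        def pop_event(self, instance_uuid, event_name):
            # Mirrors the Acquiring/Acquired/"released" "<uuid>-events" lock
            # lines in the log.
            with lockutils.lock('%s-events' % instance_uuid):
                return self._events.get(instance_uuid, {}).pop(event_name, None)

        def notify(self, instance_uuid, event_name):
            ev = self.pop_event(instance_uuid, event_name)
            if ev is None:
                # Corresponds to the WARNING "Received unexpected event ..."
                # entries: nothing was waiting, so the event is only logged.
                print('unexpected event %s for instance %s'
                      % (event_name, instance_uuid))
            else:
                ev.set()

In this sketch a spawning thread would call prepare_event() before plugging the VIF and then wait on the returned Event; the WARNING entries above are the case where the notification arrives without a registered waiter.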
[ 744.080832] env[68443]: DEBUG nova.compute.manager [req-42aae054-923a-4a49-a104-c8fa9473aa5d req-9fd310cc-e3ce-4db4-9db3-0d93c6d1d9a8 service nova] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Received event network-changed-6ecd54b1-edd3-4b8c-b0be-136ec65e0f60 {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 744.080982] env[68443]: DEBUG nova.compute.manager [req-42aae054-923a-4a49-a104-c8fa9473aa5d req-9fd310cc-e3ce-4db4-9db3-0d93c6d1d9a8 service nova] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Refreshing instance network info cache due to event network-changed-6ecd54b1-edd3-4b8c-b0be-136ec65e0f60. {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 744.082028] env[68443]: DEBUG oslo_concurrency.lockutils [req-42aae054-923a-4a49-a104-c8fa9473aa5d req-9fd310cc-e3ce-4db4-9db3-0d93c6d1d9a8 service nova] Acquiring lock "refresh_cache-08a980e1-ca8e-4af3-afbf-bd688e11259f" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 744.082028] env[68443]: DEBUG oslo_concurrency.lockutils [req-42aae054-923a-4a49-a104-c8fa9473aa5d req-9fd310cc-e3ce-4db4-9db3-0d93c6d1d9a8 service nova] Acquired lock "refresh_cache-08a980e1-ca8e-4af3-afbf-bd688e11259f" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 744.082155] env[68443]: DEBUG nova.network.neutron [req-42aae054-923a-4a49-a104-c8fa9473aa5d req-9fd310cc-e3ce-4db4-9db3-0d93c6d1d9a8 service nova] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Refreshing network info cache for port 6ecd54b1-edd3-4b8c-b0be-136ec65e0f60 {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 744.616202] env[68443]: DEBUG oslo_concurrency.lockutils [None req-f0431b1a-8bf3-44e7-b9d5-92389cd9d936 tempest-ServersAdmin275Test-1137769342 tempest-ServersAdmin275Test-1137769342-project-member] Acquiring lock "88c5636b-e0d5-4db2-8044-aa909a1da0cc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 744.616497] env[68443]: DEBUG oslo_concurrency.lockutils [None req-f0431b1a-8bf3-44e7-b9d5-92389cd9d936 tempest-ServersAdmin275Test-1137769342 tempest-ServersAdmin275Test-1137769342-project-member] Lock "88c5636b-e0d5-4db2-8044-aa909a1da0cc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 745.022564] env[68443]: DEBUG oslo_concurrency.lockutils [None req-ae974ebd-ba16-4632-8c73-899fb8857118 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Acquiring lock "ec17824f-18a5-4a44-8f64-33438ee4990d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 745.023043] env[68443]: DEBUG oslo_concurrency.lockutils [None req-ae974ebd-ba16-4632-8c73-899fb8857118 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Lock "ec17824f-18a5-4a44-8f64-33438ee4990d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68443) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 745.114767] env[68443]: DEBUG nova.network.neutron [req-fabed38e-31b5-43e6-b539-610cd32cd7be req-c56a1533-f8d3-44cf-8467-60d30476d33e service nova] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Updated VIF entry in instance network info cache for port 82df3868-4dab-4503-bc7a-5eecd49a67b6. {{(pid=68443) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 745.115350] env[68443]: DEBUG nova.network.neutron [req-fabed38e-31b5-43e6-b539-610cd32cd7be req-c56a1533-f8d3-44cf-8467-60d30476d33e service nova] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Updating instance_info_cache with network_info: [{"id": "82df3868-4dab-4503-bc7a-5eecd49a67b6", "address": "fa:16:3e:8f:3c:04", "network": {"id": "6fb247e7-84f2-4cbf-894e-927bcf262e76", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-337888342-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "34011520f33549df90bea3aa7c3b6ed4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap82df3868-4d", "ovs_interfaceid": "82df3868-4dab-4503-bc7a-5eecd49a67b6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 745.131055] env[68443]: DEBUG oslo_concurrency.lockutils [req-fabed38e-31b5-43e6-b539-610cd32cd7be req-c56a1533-f8d3-44cf-8467-60d30476d33e service nova] Releasing lock "refresh_cache-8fdbd88f-f608-4f26-9076-7d2f6eb67224" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 745.154617] env[68443]: DEBUG nova.network.neutron [req-42aae054-923a-4a49-a104-c8fa9473aa5d req-9fd310cc-e3ce-4db4-9db3-0d93c6d1d9a8 service nova] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Updated VIF entry in instance network info cache for port 6ecd54b1-edd3-4b8c-b0be-136ec65e0f60. 
{{(pid=68443) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 745.154967] env[68443]: DEBUG nova.network.neutron [req-42aae054-923a-4a49-a104-c8fa9473aa5d req-9fd310cc-e3ce-4db4-9db3-0d93c6d1d9a8 service nova] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Updating instance_info_cache with network_info: [{"id": "6ecd54b1-edd3-4b8c-b0be-136ec65e0f60", "address": "fa:16:3e:70:68:27", "network": {"id": "2929f4f5-4e27-42c8-b9d3-ac83ede3dc8b", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-478369306-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cfe5754cc87443159f24335a1bce0c4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55bd18a7-39a8-4d07-9088-9b944f9ff710", "external-id": "nsx-vlan-transportzone-686", "segmentation_id": 686, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ecd54b1-ed", "ovs_interfaceid": "6ecd54b1-edd3-4b8c-b0be-136ec65e0f60", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 745.167091] env[68443]: DEBUG oslo_concurrency.lockutils [req-42aae054-923a-4a49-a104-c8fa9473aa5d req-9fd310cc-e3ce-4db4-9db3-0d93c6d1d9a8 service nova] Releasing lock "refresh_cache-08a980e1-ca8e-4af3-afbf-bd688e11259f" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 745.869029] env[68443]: DEBUG oslo_concurrency.lockutils [None req-e93e5177-c6eb-431a-88fe-f2e582fdfe11 tempest-ServerMetadataNegativeTestJSON-393042034 tempest-ServerMetadataNegativeTestJSON-393042034-project-member] Acquiring lock "12d34fba-743e-4f1c-aeaf-6914aa5788c1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 745.869029] env[68443]: DEBUG oslo_concurrency.lockutils [None req-e93e5177-c6eb-431a-88fe-f2e582fdfe11 tempest-ServerMetadataNegativeTestJSON-393042034 tempest-ServerMetadataNegativeTestJSON-393042034-project-member] Lock "12d34fba-743e-4f1c-aeaf-6914aa5788c1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 746.443755] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c4345f29-09d5-4e24-b33c-325e40bb6175 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Acquiring lock "5afa4386-7640-4a3d-bc2b-b8b5e9d28ebf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 746.443980] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c4345f29-09d5-4e24-b33c-325e40bb6175 tempest-ServersAdminTestJSON-1774531573 
tempest-ServersAdminTestJSON-1774531573-project-member] Lock "5afa4386-7640-4a3d-bc2b-b8b5e9d28ebf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 747.048674] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a554a796-6fc0-43dc-b238-cf3300278695 tempest-MultipleCreateTestJSON-1060821449 tempest-MultipleCreateTestJSON-1060821449-project-member] Acquiring lock "410678da-9177-4822-9d48-a94eeefcd22f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 747.048973] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a554a796-6fc0-43dc-b238-cf3300278695 tempest-MultipleCreateTestJSON-1060821449 tempest-MultipleCreateTestJSON-1060821449-project-member] Lock "410678da-9177-4822-9d48-a94eeefcd22f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 747.106694] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a554a796-6fc0-43dc-b238-cf3300278695 tempest-MultipleCreateTestJSON-1060821449 tempest-MultipleCreateTestJSON-1060821449-project-member] Acquiring lock "1c1675e9-0e4d-49d8-bb02-517b6021c35d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 747.107010] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a554a796-6fc0-43dc-b238-cf3300278695 tempest-MultipleCreateTestJSON-1060821449 tempest-MultipleCreateTestJSON-1060821449-project-member] Lock "1c1675e9-0e4d-49d8-bb02-517b6021c35d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 747.537483] env[68443]: DEBUG oslo_concurrency.lockutils [None req-5e5c305d-3071-4b15-bb94-41cb6b4b28ec tempest-SecurityGroupsTestJSON-1267966769 tempest-SecurityGroupsTestJSON-1267966769-project-member] Acquiring lock "32a3402f-546a-4e3d-b71d-e4e3b50df6f0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 747.537707] env[68443]: DEBUG oslo_concurrency.lockutils [None req-5e5c305d-3071-4b15-bb94-41cb6b4b28ec tempest-SecurityGroupsTestJSON-1267966769 tempest-SecurityGroupsTestJSON-1267966769-project-member] Lock "32a3402f-546a-4e3d-b71d-e4e3b50df6f0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 749.894571] env[68443]: DEBUG oslo_concurrency.lockutils [None req-10c9e7ab-65e9-40b7-bba9-eed74def74f5 tempest-ServersTestMultiNic-1218264561 tempest-ServersTestMultiNic-1218264561-project-member] Acquiring lock "96d4015d-e7a6-4fcc-8f73-afc928113cff" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 749.894944] env[68443]: DEBUG oslo_concurrency.lockutils [None req-10c9e7ab-65e9-40b7-bba9-eed74def74f5 tempest-ServersTestMultiNic-1218264561 tempest-ServersTestMultiNic-1218264561-project-member] Lock "96d4015d-e7a6-4fcc-8f73-afc928113cff" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 751.049673] env[68443]: DEBUG oslo_concurrency.lockutils [None req-08515451-e63c-49f4-8f2b-9b96cb759d13 tempest-ServerActionsTestJSON-110831187 tempest-ServerActionsTestJSON-110831187-project-member] Acquiring lock "b8f1485f-2f6a-416f-a285-07607283eb08" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 751.049976] env[68443]: DEBUG oslo_concurrency.lockutils [None req-08515451-e63c-49f4-8f2b-9b96cb759d13 tempest-ServerActionsTestJSON-110831187 tempest-ServerActionsTestJSON-110831187-project-member] Lock "b8f1485f-2f6a-416f-a285-07607283eb08" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 752.258141] env[68443]: DEBUG oslo_concurrency.lockutils [None req-300922a4-47ad-4696-9f3d-8947dface6dc tempest-ServersTestManualDisk-339913820 tempest-ServersTestManualDisk-339913820-project-member] Acquiring lock "b473237b-274d-450d-b5a1-63d78b08f6f0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 752.258475] env[68443]: DEBUG oslo_concurrency.lockutils [None req-300922a4-47ad-4696-9f3d-8947dface6dc tempest-ServersTestManualDisk-339913820 tempest-ServersTestManualDisk-339913820-project-member] Lock "b473237b-274d-450d-b5a1-63d78b08f6f0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 757.549353] env[68443]: WARNING oslo_vmware.rw_handles [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 757.549353] env[68443]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 757.549353] env[68443]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 757.549353] env[68443]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 757.549353] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 757.549353] env[68443]: ERROR oslo_vmware.rw_handles response.begin() [ 757.549353] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 757.549353] env[68443]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 757.549353] env[68443]: ERROR 
oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 757.549353] env[68443]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 757.549353] env[68443]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 757.549353] env[68443]: ERROR oslo_vmware.rw_handles [ 757.549904] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] Downloaded image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to vmware_temp/d8c118af-27b7-4137-81fc-c198199e9334/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 757.551091] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] Caching image {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 757.552254] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Copying Virtual Disk [datastore1] vmware_temp/d8c118af-27b7-4137-81fc-c198199e9334/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk to [datastore1] vmware_temp/d8c118af-27b7-4137-81fc-c198199e9334/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk {{(pid=68443) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 757.552254] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-66f0ed60-a2a5-40e5-8b1a-632565d0d1b9 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.562877] env[68443]: DEBUG oslo_vmware.api [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Waiting for the task: (returnval){ [ 757.562877] env[68443]: value = "task-3373905" [ 757.562877] env[68443]: _type = "Task" [ 757.562877] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.575833] env[68443]: DEBUG oslo_vmware.api [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Task: {'id': task-3373905, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.073659] env[68443]: DEBUG oslo_vmware.exceptions [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Fault InvalidArgument not matched. 
{{(pid=68443) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 758.074237] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 758.077793] env[68443]: ERROR nova.compute.manager [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 758.077793] env[68443]: Faults: ['InvalidArgument'] [ 758.077793] env[68443]: ERROR nova.compute.manager [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] Traceback (most recent call last): [ 758.077793] env[68443]: ERROR nova.compute.manager [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 758.077793] env[68443]: ERROR nova.compute.manager [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] yield resources [ 758.077793] env[68443]: ERROR nova.compute.manager [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 758.077793] env[68443]: ERROR nova.compute.manager [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] self.driver.spawn(context, instance, image_meta, [ 758.077793] env[68443]: ERROR nova.compute.manager [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 758.077793] env[68443]: ERROR nova.compute.manager [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 758.077793] env[68443]: ERROR nova.compute.manager [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 758.077793] env[68443]: ERROR nova.compute.manager [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] self._fetch_image_if_missing(context, vi) [ 758.077793] env[68443]: ERROR nova.compute.manager [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 758.078469] env[68443]: ERROR nova.compute.manager [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] image_cache(vi, tmp_image_ds_loc) [ 758.078469] env[68443]: ERROR nova.compute.manager [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 758.078469] env[68443]: ERROR nova.compute.manager [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] vm_util.copy_virtual_disk( [ 758.078469] env[68443]: ERROR nova.compute.manager [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 758.078469] env[68443]: ERROR nova.compute.manager [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] session._wait_for_task(vmdk_copy_task) [ 758.078469] env[68443]: ERROR nova.compute.manager [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 758.078469] env[68443]: ERROR nova.compute.manager [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] return self.wait_for_task(task_ref) [ 758.078469] env[68443]: ERROR nova.compute.manager [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 758.078469] env[68443]: ERROR nova.compute.manager [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] return evt.wait() [ 758.078469] env[68443]: ERROR nova.compute.manager [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 758.078469] env[68443]: ERROR nova.compute.manager [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] result = hub.switch() [ 758.078469] env[68443]: ERROR nova.compute.manager [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 758.078469] env[68443]: ERROR nova.compute.manager [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] return self.greenlet.switch() [ 758.079039] env[68443]: ERROR nova.compute.manager [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 758.079039] env[68443]: ERROR nova.compute.manager [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] self.f(*self.args, **self.kw) [ 758.079039] env[68443]: ERROR nova.compute.manager [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 758.079039] env[68443]: ERROR nova.compute.manager [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] raise exceptions.translate_fault(task_info.error) [ 758.079039] env[68443]: ERROR nova.compute.manager [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 758.079039] env[68443]: ERROR nova.compute.manager [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] Faults: ['InvalidArgument'] [ 758.079039] env[68443]: ERROR nova.compute.manager [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] [ 758.079039] env[68443]: INFO nova.compute.manager [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] Terminating instance [ 758.080187] env[68443]: DEBUG oslo_concurrency.lockutils [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 758.080413] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 758.081722] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 
tempest-ServerDiagnosticsV248Test-106752764-project-member] Acquiring lock "refresh_cache-bbf050f4-9cf2-49f7-984d-d140f7aac3f5" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 758.081722] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Acquired lock "refresh_cache-bbf050f4-9cf2-49f7-984d-d140f7aac3f5" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 758.081722] env[68443]: DEBUG nova.network.neutron [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] Building network info cache for instance {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 758.088021] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0377dd1c-4070-42a5-b7ca-b1cdf999571b {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.091271] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 758.091363] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68443) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 758.094511] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c0aae804-42b3-4ec0-b87d-5b49b96a08ce {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.100093] env[68443]: DEBUG oslo_vmware.api [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Waiting for the task: (returnval){ [ 758.100093] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]5236a763-f404-6723-6434-8ceaff7635c4" [ 758.100093] env[68443]: _type = "Task" [ 758.100093] env[68443]: } to complete. 
{{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.118170] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] Preparing fetch location {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 758.118481] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Creating directory with path [datastore1] vmware_temp/3dda4b20-dd6b-4021-b7a8-ed9f9236a5ed/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 758.118768] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-72408662-c89b-49c0-8452-b150e5ec4bba {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.134036] env[68443]: DEBUG nova.network.neutron [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] Instance cache missing network info. {{(pid=68443) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 758.139438] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Created directory with path [datastore1] vmware_temp/3dda4b20-dd6b-4021-b7a8-ed9f9236a5ed/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 758.139710] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] Fetch image to [datastore1] vmware_temp/3dda4b20-dd6b-4021-b7a8-ed9f9236a5ed/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 758.140069] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to [datastore1] vmware_temp/3dda4b20-dd6b-4021-b7a8-ed9f9236a5ed/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 758.141729] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4ce5aaa-7ece-463d-b3ec-a74448ca519c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.150261] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6660a97-800c-4745-b70c-929496869ec8 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.168704] 
env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a710bca-5d86-46c8-8281-11d2d555c466 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.201513] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-145887f7-5069-4737-a19a-ebd786ca3ca9 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.208566] env[68443]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-542b1911-7245-4be1-afb0-d6ba10da2fff {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.283353] env[68443]: DEBUG nova.network.neutron [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 758.299247] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 758.303284] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Releasing lock "refresh_cache-bbf050f4-9cf2-49f7-984d-d140f7aac3f5" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 758.303284] env[68443]: DEBUG nova.compute.manager [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] Start destroying the instance on the hypervisor. 
{{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 758.303284] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 758.303863] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-139d6ad0-1ff4-4532-8db6-1b8abfabfbc8 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.314029] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] Unregistering the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 758.314282] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-25d40614-0fe8-4678-9364-bd304b1dc623 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.341282] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] Unregistered the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 758.342249] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] Deleting contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 758.342249] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Deleting the datastore file [datastore1] bbf050f4-9cf2-49f7-984d-d140f7aac3f5 {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 758.342388] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-43972cbd-ceaa-4464-b6e2-578a1b6f70cc {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.351309] env[68443]: DEBUG oslo_vmware.api [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Waiting for the task: (returnval){ [ 758.351309] env[68443]: value = "task-3373907" [ 758.351309] env[68443]: _type = "Task" [ 758.351309] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.361574] env[68443]: DEBUG oslo_vmware.api [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Task: {'id': task-3373907, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.373023] env[68443]: DEBUG oslo_vmware.rw_handles [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3dda4b20-dd6b-4021-b7a8-ed9f9236a5ed/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68443) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 758.439621] env[68443]: DEBUG oslo_vmware.rw_handles [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Completed reading data from the image iterator. {{(pid=68443) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 758.439854] env[68443]: DEBUG oslo_vmware.rw_handles [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3dda4b20-dd6b-4021-b7a8-ed9f9236a5ed/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68443) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 758.864362] env[68443]: DEBUG oslo_vmware.api [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Task: {'id': task-3373907, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.03515} completed successfully. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.864362] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Deleted the datastore file {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 758.864362] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] Deleted contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 758.864362] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 758.864362] env[68443]: INFO nova.compute.manager [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] Took 0.56 seconds to destroy the instance on the hypervisor. 
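Both the failed CopyVirtualDisk_Task above and the DeleteDatastoreFile_Task in this entry go through the wait_for_task machinery shown in the traceback: a fixed-interval looping call polls the vCenter task, logs its progress, and either returns on success or raises the translated fault (here VimFaultException, "A specified parameter was not correct: fileType", Faults: ['InvalidArgument']). A rough sketch of that poll-until-done pattern with oslo_service.loopingcall follows; it is not oslo.vmware's actual implementation, and get_task_state() is a hypothetical callback standing in for the PropertyCollector query the real code performs.

    # Sketch of the polling loop behind "Waiting for the task ... to complete"
    # and the "progress is 0%" lines; assumes get_task_state() returns a
    # (state, error) tuple such as ('running', None) or ('error', 'InvalidArgument').
    from oslo_service import loopingcall


    class TaskFailed(Exception):
        """Stand-in for the translated VimFaultException raised on task error."""


    def wait_for_task(get_task_state, poll_interval=0.5):
        def _poll():
            state, error = get_task_state()
            if state == 'success':
                # Stop the loop and hand the result back to .wait().
                raise loopingcall.LoopingCallDone(True)
            if state == 'error':
                # The real code raises exceptions.translate_fault(task_info.error).
                raise TaskFailed(error)
            # Any other state ('queued', 'running') falls through and the loop
            # polls again after poll_interval seconds.

        timer = loopingcall.FixedIntervalLoopingCall(_poll)
        return timer.start(interval=poll_interval).wait()

Under these assumptions, the InvalidArgument fault in this log would surface as the TaskFailed exception at the .wait() call, which is the point where _cache_sparse_image in the traceback sees the error and the spawn is aborted.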
[ 758.864806] env[68443]: DEBUG oslo.service.loopingcall [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 758.864806] env[68443]: DEBUG nova.compute.manager [-] [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] Skipping network deallocation for instance since networking was not requested. {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 758.865576] env[68443]: DEBUG nova.compute.claims [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] Aborting claim: {{(pid=68443) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 758.865888] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 758.866232] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 759.335954] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a101c631-ca14-4c78-8d82-61f16f515b0c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.344137] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d0f8038-82af-41d2-be13-ce1f47c0385a {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.380997] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ad040d7-20cb-4918-bc31-4ffd1f73e902 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.389450] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fcc9baf-0728-4b18-b3ec-68626b4a5144 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.406743] env[68443]: DEBUG nova.compute.provider_tree [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 759.414905] env[68443]: DEBUG nova.scheduler.client.report [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 
tempest-ServerDiagnosticsV248Test-106752764-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 759.433201] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.567s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 759.433777] env[68443]: ERROR nova.compute.manager [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 759.433777] env[68443]: Faults: ['InvalidArgument'] [ 759.433777] env[68443]: ERROR nova.compute.manager [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] Traceback (most recent call last): [ 759.433777] env[68443]: ERROR nova.compute.manager [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 759.433777] env[68443]: ERROR nova.compute.manager [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] self.driver.spawn(context, instance, image_meta, [ 759.433777] env[68443]: ERROR nova.compute.manager [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 759.433777] env[68443]: ERROR nova.compute.manager [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 759.433777] env[68443]: ERROR nova.compute.manager [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 759.433777] env[68443]: ERROR nova.compute.manager [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] self._fetch_image_if_missing(context, vi) [ 759.433777] env[68443]: ERROR nova.compute.manager [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 759.433777] env[68443]: ERROR nova.compute.manager [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] image_cache(vi, tmp_image_ds_loc) [ 759.433777] env[68443]: ERROR nova.compute.manager [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 759.434163] env[68443]: ERROR nova.compute.manager [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] vm_util.copy_virtual_disk( [ 759.434163] env[68443]: ERROR nova.compute.manager [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 759.434163] env[68443]: ERROR nova.compute.manager [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] 
session._wait_for_task(vmdk_copy_task) [ 759.434163] env[68443]: ERROR nova.compute.manager [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 759.434163] env[68443]: ERROR nova.compute.manager [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] return self.wait_for_task(task_ref) [ 759.434163] env[68443]: ERROR nova.compute.manager [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 759.434163] env[68443]: ERROR nova.compute.manager [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] return evt.wait() [ 759.434163] env[68443]: ERROR nova.compute.manager [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 759.434163] env[68443]: ERROR nova.compute.manager [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] result = hub.switch() [ 759.434163] env[68443]: ERROR nova.compute.manager [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 759.434163] env[68443]: ERROR nova.compute.manager [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] return self.greenlet.switch() [ 759.434163] env[68443]: ERROR nova.compute.manager [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 759.434163] env[68443]: ERROR nova.compute.manager [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] self.f(*self.args, **self.kw) [ 759.434686] env[68443]: ERROR nova.compute.manager [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 759.434686] env[68443]: ERROR nova.compute.manager [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] raise exceptions.translate_fault(task_info.error) [ 759.434686] env[68443]: ERROR nova.compute.manager [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 759.434686] env[68443]: ERROR nova.compute.manager [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] Faults: ['InvalidArgument'] [ 759.434686] env[68443]: ERROR nova.compute.manager [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] [ 759.434686] env[68443]: DEBUG nova.compute.utils [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] VimFaultException {{(pid=68443) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 759.438889] env[68443]: DEBUG nova.compute.manager [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] Build of instance bbf050f4-9cf2-49f7-984d-d140f7aac3f5 was re-scheduled: A specified parameter was not correct: fileType [ 759.438889] env[68443]: Faults: ['InvalidArgument'] {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 759.439031] env[68443]: DEBUG nova.compute.manager [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] [instance: 
bbf050f4-9cf2-49f7-984d-d140f7aac3f5] Unplugging VIFs for instance {{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 759.439195] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Acquiring lock "refresh_cache-bbf050f4-9cf2-49f7-984d-d140f7aac3f5" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 759.439354] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Acquired lock "refresh_cache-bbf050f4-9cf2-49f7-984d-d140f7aac3f5" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 759.439515] env[68443]: DEBUG nova.network.neutron [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] Building network info cache for instance {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 759.473904] env[68443]: DEBUG nova.network.neutron [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] Instance cache missing network info. {{(pid=68443) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 759.631573] env[68443]: DEBUG nova.network.neutron [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 759.644951] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Releasing lock "refresh_cache-bbf050f4-9cf2-49f7-984d-d140f7aac3f5" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 759.645277] env[68443]: DEBUG nova.compute.manager [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 759.645539] env[68443]: DEBUG nova.compute.manager [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] [instance: bbf050f4-9cf2-49f7-984d-d140f7aac3f5] Skipping network deallocation for instance since networking was not requested. 
{{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 759.800240] env[68443]: INFO nova.scheduler.client.report [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Deleted allocations for instance bbf050f4-9cf2-49f7-984d-d140f7aac3f5 [ 759.832887] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a80221f9-dc8a-4cf7-a64c-fd264349a92b tempest-ServerDiagnosticsV248Test-106752764 tempest-ServerDiagnosticsV248Test-106752764-project-member] Lock "bbf050f4-9cf2-49f7-984d-d140f7aac3f5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.002s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 759.860844] env[68443]: DEBUG nova.compute.manager [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 63801b63-1601-4e77-a500-3569713177bd] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 759.917193] env[68443]: DEBUG oslo_concurrency.lockutils [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 759.917193] env[68443]: DEBUG oslo_concurrency.lockutils [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 759.921281] env[68443]: INFO nova.compute.claims [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 63801b63-1601-4e77-a500-3569713177bd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 760.381023] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d1d19cb-4bee-47af-ad4f-cb2b1fd6ebf8 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.389667] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-967af0ba-1c4a-4dbb-aea7-d20824e78afe {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.424040] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bacad6e-6316-4b1b-a281-4f82aee0b664 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.430794] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0a8870e-4aa7-4bad-8c82-ceb77c005707 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.445143] env[68443]: DEBUG nova.compute.provider_tree [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b 
tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 760.458442] env[68443]: DEBUG nova.scheduler.client.report [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 760.479779] env[68443]: DEBUG oslo_concurrency.lockutils [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.562s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 760.479779] env[68443]: DEBUG nova.compute.manager [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 63801b63-1601-4e77-a500-3569713177bd] Start building networks asynchronously for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 760.544694] env[68443]: DEBUG nova.compute.utils [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Using /dev/sd instead of None {{(pid=68443) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 760.545986] env[68443]: DEBUG nova.compute.manager [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 63801b63-1601-4e77-a500-3569713177bd] Allocating IP information in the background. {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 760.546324] env[68443]: DEBUG nova.network.neutron [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 63801b63-1601-4e77-a500-3569713177bd] allocate_for_instance() {{(pid=68443) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 760.554252] env[68443]: DEBUG nova.compute.manager [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 63801b63-1601-4e77-a500-3569713177bd] Start building block device mappings for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 760.644561] env[68443]: DEBUG nova.compute.manager [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 63801b63-1601-4e77-a500-3569713177bd] Start spawning the instance on the hypervisor. 
{{(pid=68443) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 760.686445] env[68443]: DEBUG nova.virt.hardware [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-08T18:48:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-08T18:48:12Z,direct_url=,disk_format='vmdk',id=a80a8b97-4d56-4702-9b02-9d115fcd6710,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f4f2a1e220914ec3b281775c224df247',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-08T18:48:13Z,virtual_size=,visibility=), allow threads: False {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 760.689018] env[68443]: DEBUG nova.virt.hardware [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Flavor limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 760.689018] env[68443]: DEBUG nova.virt.hardware [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Image limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 760.689018] env[68443]: DEBUG nova.virt.hardware [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Flavor pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 760.689018] env[68443]: DEBUG nova.virt.hardware [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Image pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 760.689018] env[68443]: DEBUG nova.virt.hardware [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 760.689529] env[68443]: DEBUG nova.virt.hardware [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 760.689529] env[68443]: DEBUG nova.virt.hardware [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 760.689529] env[68443]: DEBUG nova.virt.hardware [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b 
tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Got 1 possible topologies {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 760.689689] env[68443]: DEBUG nova.virt.hardware [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 760.689947] env[68443]: DEBUG nova.virt.hardware [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 760.691191] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06944065-f979-42ab-980a-0712c9879775 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.704319] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7db3329f-c2d1-4f6d-9bde-20aa20dce29e {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.901367] env[68443]: DEBUG nova.policy [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '358a0d8837c149089d5fa9df3f72a945', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3a8518dd13164a47b074bf96894acdbf', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68443) authorize /opt/stack/nova/nova/policy.py:203}} [ 761.039666] env[68443]: DEBUG oslo_concurrency.lockutils [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Acquiring lock "1c1acc0d-263d-4687-93ff-291d18a592d8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 761.040049] env[68443]: DEBUG oslo_concurrency.lockutils [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Lock "1c1acc0d-263d-4687-93ff-291d18a592d8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 761.368830] env[68443]: DEBUG nova.network.neutron [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 63801b63-1601-4e77-a500-3569713177bd] Successfully created port: 87b2f03d-4821-4e5b-b9e7-8e1d5a27234e {{(pid=68443) _create_port_minimal 
/opt/stack/nova/nova/network/neutron.py:548}} [ 762.156997] env[68443]: DEBUG nova.network.neutron [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 63801b63-1601-4e77-a500-3569713177bd] Successfully updated port: 87b2f03d-4821-4e5b-b9e7-8e1d5a27234e {{(pid=68443) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 762.172501] env[68443]: DEBUG oslo_concurrency.lockutils [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Acquiring lock "refresh_cache-63801b63-1601-4e77-a500-3569713177bd" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 762.172667] env[68443]: DEBUG oslo_concurrency.lockutils [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Acquired lock "refresh_cache-63801b63-1601-4e77-a500-3569713177bd" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 762.172819] env[68443]: DEBUG nova.network.neutron [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 63801b63-1601-4e77-a500-3569713177bd] Building network info cache for instance {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 762.232469] env[68443]: DEBUG nova.network.neutron [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 63801b63-1601-4e77-a500-3569713177bd] Instance cache missing network info. 
{{(pid=68443) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 762.445654] env[68443]: DEBUG nova.network.neutron [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 63801b63-1601-4e77-a500-3569713177bd] Updating instance_info_cache with network_info: [{"id": "87b2f03d-4821-4e5b-b9e7-8e1d5a27234e", "address": "fa:16:3e:b8:02:5f", "network": {"id": "5875527f-2d57-4a67-a545-9086340a6dfe", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.207", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f4f2a1e220914ec3b281775c224df247", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "089a7624-43ba-4fce-bfc0-63e4bb7f9aeb", "external-id": "nsx-vlan-transportzone-218", "segmentation_id": 218, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87b2f03d-48", "ovs_interfaceid": "87b2f03d-4821-4e5b-b9e7-8e1d5a27234e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 762.463011] env[68443]: DEBUG oslo_concurrency.lockutils [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Releasing lock "refresh_cache-63801b63-1601-4e77-a500-3569713177bd" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 762.463721] env[68443]: DEBUG nova.compute.manager [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 63801b63-1601-4e77-a500-3569713177bd] Instance network_info: |[{"id": "87b2f03d-4821-4e5b-b9e7-8e1d5a27234e", "address": "fa:16:3e:b8:02:5f", "network": {"id": "5875527f-2d57-4a67-a545-9086340a6dfe", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.207", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f4f2a1e220914ec3b281775c224df247", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "089a7624-43ba-4fce-bfc0-63e4bb7f9aeb", "external-id": "nsx-vlan-transportzone-218", "segmentation_id": 218, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87b2f03d-48", "ovs_interfaceid": "87b2f03d-4821-4e5b-b9e7-8e1d5a27234e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 762.464653] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None 
req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 63801b63-1601-4e77-a500-3569713177bd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b8:02:5f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '089a7624-43ba-4fce-bfc0-63e4bb7f9aeb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '87b2f03d-4821-4e5b-b9e7-8e1d5a27234e', 'vif_model': 'vmxnet3'}] {{(pid=68443) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 762.473239] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Creating folder: Project (3a8518dd13164a47b074bf96894acdbf). Parent ref: group-v673136. {{(pid=68443) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 762.474406] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-835f6b76-2fdf-4df9-a02c-815bb2f7387c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.486249] env[68443]: INFO nova.virt.vmwareapi.vm_util [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Created folder: Project (3a8518dd13164a47b074bf96894acdbf) in parent group-v673136. [ 762.486445] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Creating folder: Instances. Parent ref: group-v673167. {{(pid=68443) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 762.486670] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7a5ef2e8-8f85-4876-a97a-569673dbefc6 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.497363] env[68443]: INFO nova.virt.vmwareapi.vm_util [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Created folder: Instances in parent group-v673167. [ 762.497600] env[68443]: DEBUG oslo.service.loopingcall [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 762.497785] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 63801b63-1601-4e77-a500-3569713177bd] Creating VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 762.498019] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-aeb6987c-c618-4c4d-bcaf-23727508e8ea {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.519714] env[68443]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 762.519714] env[68443]: value = "task-3373910" [ 762.519714] env[68443]: _type = "Task" [ 762.519714] env[68443]: } to complete. 
{{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.527268] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3373910, 'name': CreateVM_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.945616] env[68443]: DEBUG oslo_concurrency.lockutils [None req-1d26a334-e904-4e30-9c40-23dbea2eb2ba tempest-ImagesOneServerNegativeTestJSON-1627556834 tempest-ImagesOneServerNegativeTestJSON-1627556834-project-member] Acquiring lock "b436b172-7da8-4753-9e5c-896675ae640c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 762.945616] env[68443]: DEBUG oslo_concurrency.lockutils [None req-1d26a334-e904-4e30-9c40-23dbea2eb2ba tempest-ImagesOneServerNegativeTestJSON-1627556834 tempest-ImagesOneServerNegativeTestJSON-1627556834-project-member] Lock "b436b172-7da8-4753-9e5c-896675ae640c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 763.030207] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3373910, 'name': CreateVM_Task, 'duration_secs': 0.330858} completed successfully. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.030207] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 63801b63-1601-4e77-a500-3569713177bd] Created VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 763.031041] env[68443]: DEBUG oslo_concurrency.lockutils [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 763.031365] env[68443]: DEBUG oslo_concurrency.lockutils [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 763.031971] env[68443]: DEBUG oslo_concurrency.lockutils [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 763.032332] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe7df20c-0925-4401-be97-a6002b873f47 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.040018] env[68443]: DEBUG oslo_vmware.api [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Waiting for the task: (returnval){ [ 763.040018] env[68443]: value = 
"session[52c27adc-de12-9155-bd91-16aa298f9564]52fc6dc3-5945-477b-84e0-a614cd93a4b1" [ 763.040018] env[68443]: _type = "Task" [ 763.040018] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.047062] env[68443]: DEBUG oslo_vmware.api [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]52fc6dc3-5945-477b-84e0-a614cd93a4b1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.551730] env[68443]: DEBUG oslo_concurrency.lockutils [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 763.552138] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 63801b63-1601-4e77-a500-3569713177bd] Processing image a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 763.552956] env[68443]: DEBUG oslo_concurrency.lockutils [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 763.615922] env[68443]: DEBUG nova.compute.manager [req-6a558ca2-0dd6-464f-ac73-1d96cfa3ba27 req-b038e314-da6c-40f1-9b38-764845afea7b service nova] [instance: 63801b63-1601-4e77-a500-3569713177bd] Received event network-vif-plugged-87b2f03d-4821-4e5b-b9e7-8e1d5a27234e {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 763.615922] env[68443]: DEBUG oslo_concurrency.lockutils [req-6a558ca2-0dd6-464f-ac73-1d96cfa3ba27 req-b038e314-da6c-40f1-9b38-764845afea7b service nova] Acquiring lock "63801b63-1601-4e77-a500-3569713177bd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 763.616127] env[68443]: DEBUG oslo_concurrency.lockutils [req-6a558ca2-0dd6-464f-ac73-1d96cfa3ba27 req-b038e314-da6c-40f1-9b38-764845afea7b service nova] Lock "63801b63-1601-4e77-a500-3569713177bd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 763.616300] env[68443]: DEBUG oslo_concurrency.lockutils [req-6a558ca2-0dd6-464f-ac73-1d96cfa3ba27 req-b038e314-da6c-40f1-9b38-764845afea7b service nova] Lock "63801b63-1601-4e77-a500-3569713177bd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 763.616482] env[68443]: DEBUG 
nova.compute.manager [req-6a558ca2-0dd6-464f-ac73-1d96cfa3ba27 req-b038e314-da6c-40f1-9b38-764845afea7b service nova] [instance: 63801b63-1601-4e77-a500-3569713177bd] No waiting events found dispatching network-vif-plugged-87b2f03d-4821-4e5b-b9e7-8e1d5a27234e {{(pid=68443) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 763.616708] env[68443]: WARNING nova.compute.manager [req-6a558ca2-0dd6-464f-ac73-1d96cfa3ba27 req-b038e314-da6c-40f1-9b38-764845afea7b service nova] [instance: 63801b63-1601-4e77-a500-3569713177bd] Received unexpected event network-vif-plugged-87b2f03d-4821-4e5b-b9e7-8e1d5a27234e for instance with vm_state building and task_state spawning. [ 763.637013] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4e70ac1d-1955-44f2-a0a0-d24892f236af tempest-FloatingIPsAssociationNegativeTestJSON-1020643258 tempest-FloatingIPsAssociationNegativeTestJSON-1020643258-project-member] Acquiring lock "0cf57946-5db0-4c7c-a537-3f35e50c231f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 763.637249] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4e70ac1d-1955-44f2-a0a0-d24892f236af tempest-FloatingIPsAssociationNegativeTestJSON-1020643258 tempest-FloatingIPsAssociationNegativeTestJSON-1020643258-project-member] Lock "0cf57946-5db0-4c7c-a537-3f35e50c231f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 766.440861] env[68443]: DEBUG nova.compute.manager [req-14e891db-665d-4406-95c9-626167fd32f4 req-4beba899-3505-4dd0-b6be-be3996b07f92 service nova] [instance: 63801b63-1601-4e77-a500-3569713177bd] Received event network-changed-87b2f03d-4821-4e5b-b9e7-8e1d5a27234e {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 766.441278] env[68443]: DEBUG nova.compute.manager [req-14e891db-665d-4406-95c9-626167fd32f4 req-4beba899-3505-4dd0-b6be-be3996b07f92 service nova] [instance: 63801b63-1601-4e77-a500-3569713177bd] Refreshing instance network info cache due to event network-changed-87b2f03d-4821-4e5b-b9e7-8e1d5a27234e. 
{{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 766.441278] env[68443]: DEBUG oslo_concurrency.lockutils [req-14e891db-665d-4406-95c9-626167fd32f4 req-4beba899-3505-4dd0-b6be-be3996b07f92 service nova] Acquiring lock "refresh_cache-63801b63-1601-4e77-a500-3569713177bd" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 766.441637] env[68443]: DEBUG oslo_concurrency.lockutils [req-14e891db-665d-4406-95c9-626167fd32f4 req-4beba899-3505-4dd0-b6be-be3996b07f92 service nova] Acquired lock "refresh_cache-63801b63-1601-4e77-a500-3569713177bd" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 766.443582] env[68443]: DEBUG nova.network.neutron [req-14e891db-665d-4406-95c9-626167fd32f4 req-4beba899-3505-4dd0-b6be-be3996b07f92 service nova] [instance: 63801b63-1601-4e77-a500-3569713177bd] Refreshing network info cache for port 87b2f03d-4821-4e5b-b9e7-8e1d5a27234e {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 767.009886] env[68443]: DEBUG nova.network.neutron [req-14e891db-665d-4406-95c9-626167fd32f4 req-4beba899-3505-4dd0-b6be-be3996b07f92 service nova] [instance: 63801b63-1601-4e77-a500-3569713177bd] Updated VIF entry in instance network info cache for port 87b2f03d-4821-4e5b-b9e7-8e1d5a27234e. {{(pid=68443) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 767.010044] env[68443]: DEBUG nova.network.neutron [req-14e891db-665d-4406-95c9-626167fd32f4 req-4beba899-3505-4dd0-b6be-be3996b07f92 service nova] [instance: 63801b63-1601-4e77-a500-3569713177bd] Updating instance_info_cache with network_info: [{"id": "87b2f03d-4821-4e5b-b9e7-8e1d5a27234e", "address": "fa:16:3e:b8:02:5f", "network": {"id": "5875527f-2d57-4a67-a545-9086340a6dfe", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.207", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f4f2a1e220914ec3b281775c224df247", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "089a7624-43ba-4fce-bfc0-63e4bb7f9aeb", "external-id": "nsx-vlan-transportzone-218", "segmentation_id": 218, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87b2f03d-48", "ovs_interfaceid": "87b2f03d-4821-4e5b-b9e7-8e1d5a27234e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 767.024859] env[68443]: DEBUG oslo_concurrency.lockutils [req-14e891db-665d-4406-95c9-626167fd32f4 req-4beba899-3505-4dd0-b6be-be3996b07f92 service nova] Releasing lock "refresh_cache-63801b63-1601-4e77-a500-3569713177bd" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 768.451628] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a0327573-49f9-4745-aa6c-a13a9b1ef3a3 tempest-ServersTestJSON-1212411784 tempest-ServersTestJSON-1212411784-project-member] Acquiring lock "960a8177-a962-471b-850c-7fdf16544cbe" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 768.451910] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a0327573-49f9-4745-aa6c-a13a9b1ef3a3 tempest-ServersTestJSON-1212411784 tempest-ServersTestJSON-1212411784-project-member] Lock "960a8177-a962-471b-850c-7fdf16544cbe" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 768.913763] env[68443]: DEBUG oslo_concurrency.lockutils [None req-fd5b3b0b-5e1a-47ff-b27e-dbc1314c1ebd tempest-ServerDiagnosticsNegativeTest-621827723 tempest-ServerDiagnosticsNegativeTest-621827723-project-member] Acquiring lock "0cab7c98-d6d4-401f-a8a3-6aeb7c4ae3e9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 768.914214] env[68443]: DEBUG oslo_concurrency.lockutils [None req-fd5b3b0b-5e1a-47ff-b27e-dbc1314c1ebd tempest-ServerDiagnosticsNegativeTest-621827723 tempest-ServerDiagnosticsNegativeTest-621827723-project-member] Lock "0cab7c98-d6d4-401f-a8a3-6aeb7c4ae3e9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 771.378062] env[68443]: DEBUG oslo_concurrency.lockutils [None req-34fa30bf-596f-403d-bae3-c4edd8f220c7 tempest-ServerActionsTestOtherB-1356055185 tempest-ServerActionsTestOtherB-1356055185-project-member] Acquiring lock "bc6ee2e0-ed65-4c30-bb37-28436f8d487d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 771.378420] env[68443]: DEBUG oslo_concurrency.lockutils [None req-34fa30bf-596f-403d-bae3-c4edd8f220c7 tempest-ServerActionsTestOtherB-1356055185 tempest-ServerActionsTestOtherB-1356055185-project-member] Lock "bc6ee2e0-ed65-4c30-bb37-28436f8d487d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 772.317860] env[68443]: DEBUG oslo_concurrency.lockutils [None req-36d31862-1183-414d-b4f0-a1d408c292de tempest-ServersListShow296Test-31630010 tempest-ServersListShow296Test-31630010-project-member] Acquiring lock "a79985f5-4be3-4b95-a3d3-339b7f25b9e9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 772.317860] env[68443]: DEBUG oslo_concurrency.lockutils [None req-36d31862-1183-414d-b4f0-a1d408c292de tempest-ServersListShow296Test-31630010 tempest-ServersListShow296Test-31630010-project-member] Lock "a79985f5-4be3-4b95-a3d3-339b7f25b9e9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 778.305061] env[68443]: 
DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 778.330714] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 778.330927] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Starting heal instance info cache {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 778.331114] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Rebuilding the list of instances to heal {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 778.353651] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 778.353651] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 778.353651] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: cd131349-f678-4271-af79-456624f090d1] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 778.353823] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 778.353860] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 778.353969] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 778.356221] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 778.356221] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Skipping network cache update for instance because it is Building. 
{{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 778.356221] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 778.356221] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 63801b63-1601-4e77-a500-3569713177bd] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 778.356221] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Didn't find any instances for network info cache update. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 778.356850] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 778.357028] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 778.357192] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 778.357345] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 778.368801] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 778.369093] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 778.369304] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 778.369498] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68443) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 778.370599] env[68443]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe24b5ed-0345-4cbb-bfec-6ee8090e457e {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.379987] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c04ad14b-c75d-484e-bc16-be1a289566cb {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.397192] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90426697-9b92-4ec0-ac93-4753dc1df071 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.403539] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82c9811f-eebf-435f-ae52-75c0e676e58c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.433466] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180922MB free_disk=105GB free_vcpus=48 pci_devices=None {{(pid=68443) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 778.433633] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 778.433813] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 778.511808] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 7dd8326b-2ccd-4c27-8fc4-fc7910042870 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 778.511973] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 778.512125] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance cd131349-f678-4271-af79-456624f090d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 778.512250] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance b0882dec-0d2a-4f62-933d-0d24f3340026 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 778.512375] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 778.512519] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance f5aa2b1b-c290-42f2-84d3-272415184f14 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 778.512603] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 280e1cc5-91db-4a03-bca4-b2d2e4ddd221 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 778.512822] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 8fdbd88f-f608-4f26-9076-7d2f6eb67224 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 778.512822] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 08a980e1-ca8e-4af3-afbf-bd688e11259f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 778.512972] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 63801b63-1601-4e77-a500-3569713177bd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 778.544821] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 6df57929-1115-4080-8131-8960525eb833 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 778.572336] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 844f2b9d-ad2a-431a-a587-65ba446d571f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 778.587574] env[68443]: DEBUG oslo_concurrency.lockutils [None req-2598a7ae-49a0-4bde-bf45-5c28a1eeb6fe tempest-AttachInterfacesV270Test-1131883941 tempest-AttachInterfacesV270Test-1131883941-project-member] Acquiring lock "67780abc-a216-41d2-b531-d31aef150587" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 778.587805] env[68443]: DEBUG oslo_concurrency.lockutils [None req-2598a7ae-49a0-4bde-bf45-5c28a1eeb6fe tempest-AttachInterfacesV270Test-1131883941 tempest-AttachInterfacesV270Test-1131883941-project-member] Lock "67780abc-a216-41d2-b531-d31aef150587" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 778.590678] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance cee290e6-66e3-4d2e-a9bb-f93db33eaaaa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 778.606335] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 88c5636b-e0d5-4db2-8044-aa909a1da0cc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 778.618594] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance ec17824f-18a5-4a44-8f64-33438ee4990d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 778.629093] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 12d34fba-743e-4f1c-aeaf-6914aa5788c1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 778.639420] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 5afa4386-7640-4a3d-bc2b-b8b5e9d28ebf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 778.654018] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 410678da-9177-4822-9d48-a94eeefcd22f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 778.664099] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 1c1675e9-0e4d-49d8-bb02-517b6021c35d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 778.675116] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 32a3402f-546a-4e3d-b71d-e4e3b50df6f0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 778.685609] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 96d4015d-e7a6-4fcc-8f73-afc928113cff has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 778.697398] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance b8f1485f-2f6a-416f-a285-07607283eb08 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 778.708766] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance b473237b-274d-450d-b5a1-63d78b08f6f0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 778.720582] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 1c1acc0d-263d-4687-93ff-291d18a592d8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 778.748572] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance b436b172-7da8-4753-9e5c-896675ae640c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 778.766141] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 0cf57946-5db0-4c7c-a537-3f35e50c231f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 778.781014] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 960a8177-a962-471b-850c-7fdf16544cbe has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 778.791021] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 0cab7c98-d6d4-401f-a8a3-6aeb7c4ae3e9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 778.803389] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance bc6ee2e0-ed65-4c30-bb37-28436f8d487d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 778.812312] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance a79985f5-4be3-4b95-a3d3-339b7f25b9e9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 778.822433] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 67780abc-a216-41d2-b531-d31aef150587 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 778.822636] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 778.822790] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 779.216426] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94c5b3d4-6a46-495a-9eec-8472b32f4d51 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.225964] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d995c37-4222-4ece-bc1c-465e8e611a46 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.258203] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e8436ae-4339-47ee-b747-1379634177e8 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.265541] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a37be281-f082-4cf5-8d5d-2df70d920cab {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.278281] env[68443]: DEBUG nova.compute.provider_tree [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 779.291034] env[68443]: DEBUG nova.scheduler.client.report [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 779.304623] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68443) 
_update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 779.304802] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.871s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 779.774184] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 779.774184] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 779.774184] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 779.774184] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68443) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 779.820695] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 807.158797] env[68443]: WARNING oslo_vmware.rw_handles [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 807.158797] env[68443]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 807.158797] env[68443]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 807.158797] env[68443]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 807.158797] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 807.158797] env[68443]: ERROR oslo_vmware.rw_handles response.begin() [ 807.158797] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 807.158797] env[68443]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 807.158797] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 807.158797] env[68443]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 807.158797] env[68443]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 807.158797] env[68443]: ERROR oslo_vmware.rw_handles [ 807.159860] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 
tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] Downloaded image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to vmware_temp/3dda4b20-dd6b-4021-b7a8-ed9f9236a5ed/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 807.160842] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] Caching image {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 807.161127] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Copying Virtual Disk [datastore1] vmware_temp/3dda4b20-dd6b-4021-b7a8-ed9f9236a5ed/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk to [datastore1] vmware_temp/3dda4b20-dd6b-4021-b7a8-ed9f9236a5ed/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk {{(pid=68443) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 807.161435] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-74cd3be3-7284-4ef8-9059-e61ed7aefcd7 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.169114] env[68443]: DEBUG oslo_vmware.api [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Waiting for the task: (returnval){ [ 807.169114] env[68443]: value = "task-3373911" [ 807.169114] env[68443]: _type = "Task" [ 807.169114] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.177150] env[68443]: DEBUG oslo_vmware.api [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Task: {'id': task-3373911, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.680475] env[68443]: DEBUG oslo_vmware.exceptions [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Fault InvalidArgument not matched. 
{{(pid=68443) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 807.680843] env[68443]: DEBUG oslo_concurrency.lockutils [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 807.681433] env[68443]: ERROR nova.compute.manager [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 807.681433] env[68443]: Faults: ['InvalidArgument'] [ 807.681433] env[68443]: ERROR nova.compute.manager [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] Traceback (most recent call last): [ 807.681433] env[68443]: ERROR nova.compute.manager [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 807.681433] env[68443]: ERROR nova.compute.manager [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] yield resources [ 807.681433] env[68443]: ERROR nova.compute.manager [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 807.681433] env[68443]: ERROR nova.compute.manager [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] self.driver.spawn(context, instance, image_meta, [ 807.681433] env[68443]: ERROR nova.compute.manager [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 807.681433] env[68443]: ERROR nova.compute.manager [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] self._vmops.spawn(context, instance, image_meta, injected_files, [ 807.681433] env[68443]: ERROR nova.compute.manager [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 807.681433] env[68443]: ERROR nova.compute.manager [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] self._fetch_image_if_missing(context, vi) [ 807.681433] env[68443]: ERROR nova.compute.manager [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 807.681963] env[68443]: ERROR nova.compute.manager [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] image_cache(vi, tmp_image_ds_loc) [ 807.681963] env[68443]: ERROR nova.compute.manager [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 807.681963] env[68443]: ERROR nova.compute.manager [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] vm_util.copy_virtual_disk( [ 807.681963] env[68443]: ERROR nova.compute.manager [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 807.681963] env[68443]: ERROR nova.compute.manager [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] session._wait_for_task(vmdk_copy_task) [ 807.681963] env[68443]: ERROR nova.compute.manager [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 807.681963] env[68443]: ERROR nova.compute.manager [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] return self.wait_for_task(task_ref) [ 807.681963] env[68443]: ERROR nova.compute.manager [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 807.681963] env[68443]: ERROR nova.compute.manager [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] return evt.wait() [ 807.681963] env[68443]: ERROR nova.compute.manager [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 807.681963] env[68443]: ERROR nova.compute.manager [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] result = hub.switch() [ 807.681963] env[68443]: ERROR nova.compute.manager [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 807.681963] env[68443]: ERROR nova.compute.manager [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] return self.greenlet.switch() [ 807.682506] env[68443]: ERROR nova.compute.manager [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 807.682506] env[68443]: ERROR nova.compute.manager [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] self.f(*self.args, **self.kw) [ 807.682506] env[68443]: ERROR nova.compute.manager [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 807.682506] env[68443]: ERROR nova.compute.manager [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] raise exceptions.translate_fault(task_info.error) [ 807.682506] env[68443]: ERROR nova.compute.manager [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 807.682506] env[68443]: ERROR nova.compute.manager [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] Faults: ['InvalidArgument'] [ 807.682506] env[68443]: ERROR nova.compute.manager [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] [ 807.682506] env[68443]: INFO nova.compute.manager [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] Terminating instance [ 807.683792] env[68443]: DEBUG oslo_concurrency.lockutils [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 807.684061] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 807.684708] env[68443]: DEBUG nova.compute.manager [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 
tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 807.684937] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 807.685205] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c54b4e82-c371-4243-853c-74f513152abe {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.687600] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33d0a14e-9dca-444e-9d98-3160ace183eb {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.694737] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] Unregistering the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 807.698731] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5900eac0-c2a1-4d85-b4df-64f73b30d83c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.700375] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 807.700592] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68443) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 807.702112] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48b3fff6-df6e-4d69-820b-c76f59bd41f1 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.711367] env[68443]: DEBUG oslo_vmware.api [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Waiting for the task: (returnval){ [ 807.711367] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]52d72aa5-4d6e-0f6d-ea03-0734ee9489f4" [ 807.711367] env[68443]: _type = "Task" [ 807.711367] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.719493] env[68443]: DEBUG oslo_vmware.api [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]52d72aa5-4d6e-0f6d-ea03-0734ee9489f4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.775329] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] Unregistered the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 807.776328] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] Deleting contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 807.776557] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Deleting the datastore file [datastore1] 7dd8326b-2ccd-4c27-8fc4-fc7910042870 {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 807.776837] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0b572281-bce1-40fa-a666-4a987959683c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.783345] env[68443]: DEBUG oslo_vmware.api [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Waiting for the task: (returnval){ [ 807.783345] env[68443]: value = "task-3373913" [ 807.783345] env[68443]: _type = "Task" [ 807.783345] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.791805] env[68443]: DEBUG oslo_vmware.api [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Task: {'id': task-3373913, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.222443] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] [instance: cd131349-f678-4271-af79-456624f090d1] Preparing fetch location {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 808.222736] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Creating directory with path [datastore1] vmware_temp/b6fde654-9cba-4718-95ec-495ce1cf4551/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 808.222926] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7d7b59a5-173d-4291-8578-182796751a26 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.234696] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Created directory with path [datastore1] vmware_temp/b6fde654-9cba-4718-95ec-495ce1cf4551/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 808.234892] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] [instance: cd131349-f678-4271-af79-456624f090d1] Fetch image to [datastore1] vmware_temp/b6fde654-9cba-4718-95ec-495ce1cf4551/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 808.235074] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] [instance: cd131349-f678-4271-af79-456624f090d1] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to [datastore1] vmware_temp/b6fde654-9cba-4718-95ec-495ce1cf4551/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 808.235833] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4c89951-a66a-4751-87a8-3393c7c0854e {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.242460] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5f22afc-8fb7-4492-b10f-3d092cd2ff75 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.251392] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fd00f16-24d3-47d1-938d-09dbb5c91281 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.281487] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c46c40d-8d86-4669-83c9-b320cd0a7367 {{(pid=68443) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.292508] env[68443]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-46cb80ac-5f01-4367-8608-96cad1f7fcd1 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.294171] env[68443]: DEBUG oslo_vmware.api [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Task: {'id': task-3373913, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.071809} completed successfully. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.294407] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Deleted the datastore file {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 808.294583] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] Deleted contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 808.294756] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 808.294934] env[68443]: INFO nova.compute.manager [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] Took 0.61 seconds to destroy the instance on the hypervisor. 
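The CopyVirtualDisk_Task, UnregisterVM and DeleteDatastoreFile_Task entries above all follow the same wait_for_task/_poll_task pattern: the driver submits a vCenter task, then oslo.vmware polls the task's TaskInfo until it reaches success or error, raising VimFaultException on error, which is how the "A specified parameter was not correct: fileType" fault surfaced during spawn. Below is a minimal sketch of that polling loop, not the actual oslo.vmware implementation; the get_task_info callable and the TaskInfo-style fields it reads (state, error) are assumptions modeled on the vSphere API.

import time

class VimFaultError(Exception):
    """Illustrative stand-in for oslo_vmware.exceptions.VimFaultException."""
    def __init__(self, message, fault_list):
        super().__init__(message)
        self.fault_list = fault_list

def wait_for_vmware_task(get_task_info, interval=0.5, timeout=300):
    # get_task_info is a hypothetical callable returning an object with
    # vSphere TaskInfo-style fields: state in ('queued', 'running', 'success',
    # 'error') and, on failure, error.localizedMessage plus a fault object
    # whose class name identifies the fault type.
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()
        if info.state in ('queued', 'running'):
            time.sleep(interval)          # logged above as "progress is 0%."
            continue
        if info.state == 'success':
            return info                   # "completed successfully" in the log
        # state == 'error': translate the server-side fault into an exception,
        # e.g. "A specified parameter was not correct: fileType" with
        # Faults: ['InvalidArgument'] for the failed CopyVirtualDisk_Task.
        raise VimFaultError(info.error.localizedMessage,
                            fault_list=[type(info.error.fault).__name__])
    raise TimeoutError(f'task did not complete within {timeout}s')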
[ 808.298884] env[68443]: DEBUG nova.compute.claims [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] Aborting claim: {{(pid=68443) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 808.299141] env[68443]: DEBUG oslo_concurrency.lockutils [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 808.299420] env[68443]: DEBUG oslo_concurrency.lockutils [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 808.387559] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] [instance: cd131349-f678-4271-af79-456624f090d1] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 808.450010] env[68443]: DEBUG oslo_vmware.rw_handles [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b6fde654-9cba-4718-95ec-495ce1cf4551/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68443) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 808.514272] env[68443]: DEBUG oslo_vmware.rw_handles [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Completed reading data from the image iterator. {{(pid=68443) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 808.514486] env[68443]: DEBUG oslo_vmware.rw_handles [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b6fde654-9cba-4718-95ec-495ce1cf4551/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68443) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 808.780325] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a7f954c-7539-414f-bf6e-5a4a8fa154a4 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.787986] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37dc79e5-c7d0-4f6d-aff4-4d96027c408c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.817492] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04f2536a-e892-4c0d-b500-fcd173a75661 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.824487] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a182e51-072e-4d11-b218-c6679718e95c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.837328] env[68443]: DEBUG nova.compute.provider_tree [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 808.845809] env[68443]: DEBUG nova.scheduler.client.report [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 808.866423] env[68443]: DEBUG oslo_concurrency.lockutils [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.567s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 808.866949] env[68443]: ERROR nova.compute.manager [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 808.866949] env[68443]: Faults: ['InvalidArgument'] [ 808.866949] env[68443]: ERROR nova.compute.manager [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] Traceback (most recent call last): [ 808.866949] env[68443]: ERROR nova.compute.manager [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 
808.866949] env[68443]: ERROR nova.compute.manager [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] self.driver.spawn(context, instance, image_meta, [ 808.866949] env[68443]: ERROR nova.compute.manager [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 808.866949] env[68443]: ERROR nova.compute.manager [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] self._vmops.spawn(context, instance, image_meta, injected_files, [ 808.866949] env[68443]: ERROR nova.compute.manager [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 808.866949] env[68443]: ERROR nova.compute.manager [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] self._fetch_image_if_missing(context, vi) [ 808.866949] env[68443]: ERROR nova.compute.manager [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 808.866949] env[68443]: ERROR nova.compute.manager [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] image_cache(vi, tmp_image_ds_loc) [ 808.866949] env[68443]: ERROR nova.compute.manager [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 808.867372] env[68443]: ERROR nova.compute.manager [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] vm_util.copy_virtual_disk( [ 808.867372] env[68443]: ERROR nova.compute.manager [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 808.867372] env[68443]: ERROR nova.compute.manager [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] session._wait_for_task(vmdk_copy_task) [ 808.867372] env[68443]: ERROR nova.compute.manager [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 808.867372] env[68443]: ERROR nova.compute.manager [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] return self.wait_for_task(task_ref) [ 808.867372] env[68443]: ERROR nova.compute.manager [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 808.867372] env[68443]: ERROR nova.compute.manager [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] return evt.wait() [ 808.867372] env[68443]: ERROR nova.compute.manager [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 808.867372] env[68443]: ERROR nova.compute.manager [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] result = hub.switch() [ 808.867372] env[68443]: ERROR nova.compute.manager [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 808.867372] env[68443]: ERROR nova.compute.manager [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] return self.greenlet.switch() [ 808.867372] env[68443]: ERROR nova.compute.manager [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 808.867372] env[68443]: ERROR nova.compute.manager [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] self.f(*self.args, **self.kw) [ 808.867913] env[68443]: ERROR nova.compute.manager [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 808.867913] env[68443]: ERROR nova.compute.manager [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] raise exceptions.translate_fault(task_info.error) [ 808.867913] env[68443]: ERROR nova.compute.manager [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 808.867913] env[68443]: ERROR nova.compute.manager [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] Faults: ['InvalidArgument'] [ 808.867913] env[68443]: ERROR nova.compute.manager [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] [ 808.867913] env[68443]: DEBUG nova.compute.utils [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] VimFaultException {{(pid=68443) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 808.869145] env[68443]: DEBUG nova.compute.manager [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] Build of instance 7dd8326b-2ccd-4c27-8fc4-fc7910042870 was re-scheduled: A specified parameter was not correct: fileType [ 808.869145] env[68443]: Faults: ['InvalidArgument'] {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 808.869517] env[68443]: DEBUG nova.compute.manager [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] Unplugging VIFs for instance {{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 808.869712] env[68443]: DEBUG nova.compute.manager [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 808.869874] env[68443]: DEBUG nova.compute.manager [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 808.870052] env[68443]: DEBUG nova.network.neutron [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 809.398931] env[68443]: DEBUG nova.network.neutron [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 809.416302] env[68443]: INFO nova.compute.manager [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 7dd8326b-2ccd-4c27-8fc4-fc7910042870] Took 0.55 seconds to deallocate network for instance. [ 809.527087] env[68443]: INFO nova.scheduler.client.report [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Deleted allocations for instance 7dd8326b-2ccd-4c27-8fc4-fc7910042870 [ 809.549132] env[68443]: DEBUG oslo_concurrency.lockutils [None req-72a48d7f-807d-452f-984c-f846956762d1 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Lock "7dd8326b-2ccd-4c27-8fc4-fc7910042870" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 103.159s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 809.578654] env[68443]: DEBUG nova.compute.manager [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] [instance: 6df57929-1115-4080-8131-8960525eb833] Starting instance... 
{{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 809.630049] env[68443]: DEBUG oslo_concurrency.lockutils [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 809.630227] env[68443]: DEBUG oslo_concurrency.lockutils [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 809.631886] env[68443]: INFO nova.compute.claims [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] [instance: 6df57929-1115-4080-8131-8960525eb833] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 810.071355] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7c0527b-d68f-4cfa-96ef-d1292a1b3c94 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.079006] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3654dfd-2982-4e19-bb04-fc18ae5de83f {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.109062] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67391189-e5a5-4345-9e8a-4079fcc1c323 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.116643] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1171967-9607-4a01-987c-bd9eabb06b2b {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.131026] env[68443]: DEBUG nova.compute.provider_tree [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 810.139109] env[68443]: DEBUG nova.scheduler.client.report [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 810.157324] 
env[68443]: DEBUG oslo_concurrency.lockutils [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.527s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 810.157958] env[68443]: DEBUG nova.compute.manager [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] [instance: 6df57929-1115-4080-8131-8960525eb833] Start building networks asynchronously for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 810.190975] env[68443]: DEBUG nova.compute.utils [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Using /dev/sd instead of None {{(pid=68443) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 810.192845] env[68443]: DEBUG nova.compute.manager [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] [instance: 6df57929-1115-4080-8131-8960525eb833] Allocating IP information in the background. {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 810.193129] env[68443]: DEBUG nova.network.neutron [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] [instance: 6df57929-1115-4080-8131-8960525eb833] allocate_for_instance() {{(pid=68443) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 810.202744] env[68443]: DEBUG nova.compute.manager [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] [instance: 6df57929-1115-4080-8131-8960525eb833] Start building block device mappings for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 810.261326] env[68443]: DEBUG nova.policy [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '93b3ddb946df4e9d99d8cbab7585bf1e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd099a49659c649549b21d3568129203d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68443) authorize /opt/stack/nova/nova/policy.py:203}} [ 810.284260] env[68443]: DEBUG nova.compute.manager [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] [instance: 6df57929-1115-4080-8131-8960525eb833] Start spawning the instance on the hypervisor. 
{{(pid=68443) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 810.312991] env[68443]: DEBUG nova.virt.hardware [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-08T18:48:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-08T18:48:12Z,direct_url=,disk_format='vmdk',id=a80a8b97-4d56-4702-9b02-9d115fcd6710,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f4f2a1e220914ec3b281775c224df247',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-08T18:48:13Z,virtual_size=,visibility=), allow threads: False {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 810.313381] env[68443]: DEBUG nova.virt.hardware [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Flavor limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 810.313447] env[68443]: DEBUG nova.virt.hardware [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Image limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 810.314028] env[68443]: DEBUG nova.virt.hardware [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Flavor pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 810.314028] env[68443]: DEBUG nova.virt.hardware [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Image pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 810.314028] env[68443]: DEBUG nova.virt.hardware [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 810.314168] env[68443]: DEBUG nova.virt.hardware [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 810.314259] env[68443]: DEBUG nova.virt.hardware [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68443) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 810.315196] env[68443]: DEBUG nova.virt.hardware [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Got 1 possible topologies {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 810.315196] env[68443]: DEBUG nova.virt.hardware [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 810.315196] env[68443]: DEBUG nova.virt.hardware [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 810.315639] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4422377e-149c-46b2-963d-0f9bf65b4c8f {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.324379] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50340f90-3aa7-4c5c-a170-0db3b8077d56 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.596169] env[68443]: DEBUG nova.network.neutron [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] [instance: 6df57929-1115-4080-8131-8960525eb833] Successfully created port: 1cb70c95-522f-49e3-b581-79d49b1f3d30 {{(pid=68443) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 811.261404] env[68443]: DEBUG nova.network.neutron [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] [instance: 6df57929-1115-4080-8131-8960525eb833] Successfully updated port: 1cb70c95-522f-49e3-b581-79d49b1f3d30 {{(pid=68443) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 811.276468] env[68443]: DEBUG oslo_concurrency.lockutils [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Acquiring lock "refresh_cache-6df57929-1115-4080-8131-8960525eb833" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 811.276607] env[68443]: DEBUG oslo_concurrency.lockutils [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Acquired lock "refresh_cache-6df57929-1115-4080-8131-8960525eb833" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 811.276757] env[68443]: DEBUG nova.network.neutron [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] [instance: 6df57929-1115-4080-8131-8960525eb833] Building network info cache for instance {{(pid=68443) 
_get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 811.332073] env[68443]: DEBUG nova.network.neutron [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] [instance: 6df57929-1115-4080-8131-8960525eb833] Instance cache missing network info. {{(pid=68443) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 811.521635] env[68443]: DEBUG nova.network.neutron [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] [instance: 6df57929-1115-4080-8131-8960525eb833] Updating instance_info_cache with network_info: [{"id": "1cb70c95-522f-49e3-b581-79d49b1f3d30", "address": "fa:16:3e:a3:23:1e", "network": {"id": "e135be23-83b3-41fb-ae86-a0760beac394", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-814891071-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d099a49659c649549b21d3568129203d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "329d0e4b-4190-484a-8560-9356dc31beca", "external-id": "nsx-vlan-transportzone-29", "segmentation_id": 29, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1cb70c95-52", "ovs_interfaceid": "1cb70c95-522f-49e3-b581-79d49b1f3d30", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 811.534689] env[68443]: DEBUG oslo_concurrency.lockutils [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Releasing lock "refresh_cache-6df57929-1115-4080-8131-8960525eb833" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 811.534997] env[68443]: DEBUG nova.compute.manager [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] [instance: 6df57929-1115-4080-8131-8960525eb833] Instance network_info: |[{"id": "1cb70c95-522f-49e3-b581-79d49b1f3d30", "address": "fa:16:3e:a3:23:1e", "network": {"id": "e135be23-83b3-41fb-ae86-a0760beac394", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-814891071-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d099a49659c649549b21d3568129203d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"329d0e4b-4190-484a-8560-9356dc31beca", "external-id": "nsx-vlan-transportzone-29", "segmentation_id": 29, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1cb70c95-52", "ovs_interfaceid": "1cb70c95-522f-49e3-b581-79d49b1f3d30", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 811.535427] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] [instance: 6df57929-1115-4080-8131-8960525eb833] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a3:23:1e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '329d0e4b-4190-484a-8560-9356dc31beca', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1cb70c95-522f-49e3-b581-79d49b1f3d30', 'vif_model': 'vmxnet3'}] {{(pid=68443) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 811.543055] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Creating folder: Project (d099a49659c649549b21d3568129203d). Parent ref: group-v673136. {{(pid=68443) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 811.543630] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3a2f8ade-681b-4caa-a939-097c4aacf290 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.554840] env[68443]: INFO nova.virt.vmwareapi.vm_util [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Created folder: Project (d099a49659c649549b21d3568129203d) in parent group-v673136. [ 811.554840] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Creating folder: Instances. Parent ref: group-v673170. {{(pid=68443) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 811.554840] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-193b8ad4-1df7-49dc-8192-de1129bc4a03 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.563490] env[68443]: INFO nova.virt.vmwareapi.vm_util [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Created folder: Instances in parent group-v673170. [ 811.563740] env[68443]: DEBUG oslo.service.loopingcall [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 811.563905] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6df57929-1115-4080-8131-8960525eb833] Creating VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 811.564115] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-840869c5-1885-47f6-93a6-fae23d8986fd {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.582959] env[68443]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 811.582959] env[68443]: value = "task-3373916" [ 811.582959] env[68443]: _type = "Task" [ 811.582959] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.590189] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3373916, 'name': CreateVM_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.926156] env[68443]: DEBUG nova.compute.manager [req-e4ff13c2-d8c2-4b07-a48e-13a1b8706cf9 req-f2cc7836-13be-4dcd-adb8-734ae97596fd service nova] [instance: 6df57929-1115-4080-8131-8960525eb833] Received event network-vif-plugged-1cb70c95-522f-49e3-b581-79d49b1f3d30 {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 811.926156] env[68443]: DEBUG oslo_concurrency.lockutils [req-e4ff13c2-d8c2-4b07-a48e-13a1b8706cf9 req-f2cc7836-13be-4dcd-adb8-734ae97596fd service nova] Acquiring lock "6df57929-1115-4080-8131-8960525eb833-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 811.926462] env[68443]: DEBUG oslo_concurrency.lockutils [req-e4ff13c2-d8c2-4b07-a48e-13a1b8706cf9 req-f2cc7836-13be-4dcd-adb8-734ae97596fd service nova] Lock "6df57929-1115-4080-8131-8960525eb833-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 811.926649] env[68443]: DEBUG oslo_concurrency.lockutils [req-e4ff13c2-d8c2-4b07-a48e-13a1b8706cf9 req-f2cc7836-13be-4dcd-adb8-734ae97596fd service nova] Lock "6df57929-1115-4080-8131-8960525eb833-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 811.926855] env[68443]: DEBUG nova.compute.manager [req-e4ff13c2-d8c2-4b07-a48e-13a1b8706cf9 req-f2cc7836-13be-4dcd-adb8-734ae97596fd service nova] [instance: 6df57929-1115-4080-8131-8960525eb833] No waiting events found dispatching network-vif-plugged-1cb70c95-522f-49e3-b581-79d49b1f3d30 {{(pid=68443) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 811.927143] env[68443]: WARNING nova.compute.manager [req-e4ff13c2-d8c2-4b07-a48e-13a1b8706cf9 req-f2cc7836-13be-4dcd-adb8-734ae97596fd service nova] [instance: 6df57929-1115-4080-8131-8960525eb833] Received unexpected event network-vif-plugged-1cb70c95-522f-49e3-b581-79d49b1f3d30 for instance with vm_state building and task_state spawning. 
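Editor's note on the `CreateVM_Task` lines above: the "Waiting for the task ... to complete" / "progress is 0%" records come from oslo.vmware's `wait_for_task`/`_poll_task` (the `oslo_vmware/api.py` paths shown in the log). Below is a minimal, hedged sketch of that poll-until-done pattern, not the oslo.vmware implementation itself; `fetch_task_info` and the state names are illustrative stand-ins supplied by the caller.

```python
# Hedged sketch of a vCenter-style task poll loop, roughly what the
# "Waiting for the task ... to complete" / "progress is N%" lines reflect.
# fetch_task_info() is a hypothetical stand-in for the real property read.
import time


class TaskFailed(Exception):
    """Raised when the task reports an error (cf. VimFaultException in the log)."""


def wait_for_task(task_ref, fetch_task_info, poll_interval=0.5, timeout=300):
    """Poll task_ref until it reports success or error, then return or raise."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = fetch_task_info(task_ref)   # e.g. {'state': 'running', 'progress': 40}
        state = info.get("state")
        if state == "success":
            return info.get("result")
        if state == "error":
            # In oslo.vmware this is where the fault gets translated into an
            # exception (exceptions.translate_fault in the traceback earlier).
            raise TaskFailed(info.get("error", "unknown fault"))
        time.sleep(poll_interval)          # still queued/running; poll again
    raise TimeoutError(f"task {task_ref} did not complete within {timeout}s")
```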
[ 811.927362] env[68443]: DEBUG nova.compute.manager [req-e4ff13c2-d8c2-4b07-a48e-13a1b8706cf9 req-f2cc7836-13be-4dcd-adb8-734ae97596fd service nova] [instance: 6df57929-1115-4080-8131-8960525eb833] Received event network-changed-1cb70c95-522f-49e3-b581-79d49b1f3d30 {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 811.927561] env[68443]: DEBUG nova.compute.manager [req-e4ff13c2-d8c2-4b07-a48e-13a1b8706cf9 req-f2cc7836-13be-4dcd-adb8-734ae97596fd service nova] [instance: 6df57929-1115-4080-8131-8960525eb833] Refreshing instance network info cache due to event network-changed-1cb70c95-522f-49e3-b581-79d49b1f3d30. {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 811.927824] env[68443]: DEBUG oslo_concurrency.lockutils [req-e4ff13c2-d8c2-4b07-a48e-13a1b8706cf9 req-f2cc7836-13be-4dcd-adb8-734ae97596fd service nova] Acquiring lock "refresh_cache-6df57929-1115-4080-8131-8960525eb833" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 811.928021] env[68443]: DEBUG oslo_concurrency.lockutils [req-e4ff13c2-d8c2-4b07-a48e-13a1b8706cf9 req-f2cc7836-13be-4dcd-adb8-734ae97596fd service nova] Acquired lock "refresh_cache-6df57929-1115-4080-8131-8960525eb833" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 811.928221] env[68443]: DEBUG nova.network.neutron [req-e4ff13c2-d8c2-4b07-a48e-13a1b8706cf9 req-f2cc7836-13be-4dcd-adb8-734ae97596fd service nova] [instance: 6df57929-1115-4080-8131-8960525eb833] Refreshing network info cache for port 1cb70c95-522f-49e3-b581-79d49b1f3d30 {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 812.093538] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3373916, 'name': CreateVM_Task, 'duration_secs': 0.308543} completed successfully. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.093705] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6df57929-1115-4080-8131-8960525eb833] Created VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 812.094382] env[68443]: DEBUG oslo_concurrency.lockutils [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 812.095053] env[68443]: DEBUG oslo_concurrency.lockutils [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 812.095053] env[68443]: DEBUG oslo_concurrency.lockutils [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 812.095163] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-46a0c7b4-626c-42ff-af89-4a88f37924a8 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.101538] env[68443]: DEBUG oslo_vmware.api [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Waiting for the task: (returnval){ [ 812.101538] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]52ff297f-206a-9134-03e7-6fc425a1b3f8" [ 812.101538] env[68443]: _type = "Task" [ 812.101538] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.109015] env[68443]: DEBUG oslo_vmware.api [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]52ff297f-206a-9134-03e7-6fc425a1b3f8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.193781] env[68443]: DEBUG nova.network.neutron [req-e4ff13c2-d8c2-4b07-a48e-13a1b8706cf9 req-f2cc7836-13be-4dcd-adb8-734ae97596fd service nova] [instance: 6df57929-1115-4080-8131-8960525eb833] Updated VIF entry in instance network info cache for port 1cb70c95-522f-49e3-b581-79d49b1f3d30. 
{{(pid=68443) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 812.194142] env[68443]: DEBUG nova.network.neutron [req-e4ff13c2-d8c2-4b07-a48e-13a1b8706cf9 req-f2cc7836-13be-4dcd-adb8-734ae97596fd service nova] [instance: 6df57929-1115-4080-8131-8960525eb833] Updating instance_info_cache with network_info: [{"id": "1cb70c95-522f-49e3-b581-79d49b1f3d30", "address": "fa:16:3e:a3:23:1e", "network": {"id": "e135be23-83b3-41fb-ae86-a0760beac394", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-814891071-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d099a49659c649549b21d3568129203d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "329d0e4b-4190-484a-8560-9356dc31beca", "external-id": "nsx-vlan-transportzone-29", "segmentation_id": 29, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1cb70c95-52", "ovs_interfaceid": "1cb70c95-522f-49e3-b581-79d49b1f3d30", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 812.203395] env[68443]: DEBUG oslo_concurrency.lockutils [req-e4ff13c2-d8c2-4b07-a48e-13a1b8706cf9 req-f2cc7836-13be-4dcd-adb8-734ae97596fd service nova] Releasing lock "refresh_cache-6df57929-1115-4080-8131-8960525eb833" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 812.557561] env[68443]: DEBUG oslo_concurrency.lockutils [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Acquiring lock "1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 812.557846] env[68443]: DEBUG oslo_concurrency.lockutils [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Lock "1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 812.611897] env[68443]: DEBUG oslo_concurrency.lockutils [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 812.612171] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] [instance: 
6df57929-1115-4080-8131-8960525eb833] Processing image a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 812.612384] env[68443]: DEBUG oslo_concurrency.lockutils [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 837.825814] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 838.824611] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 838.824778] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Starting heal instance info cache {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 838.824901] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Rebuilding the list of instances to heal {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 838.846016] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 838.846295] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: cd131349-f678-4271-af79-456624f090d1] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 838.846366] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 838.846484] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 838.846732] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 838.846732] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Skipping network cache update for instance because it is Building. 
{{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 838.846836] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 838.846944] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 838.847079] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 63801b63-1601-4e77-a500-3569713177bd] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 838.847197] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 6df57929-1115-4080-8131-8960525eb833] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 838.847314] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Didn't find any instances for network info cache update. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 838.847755] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 838.847934] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 838.848078] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68443) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 839.825491] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 839.825736] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 839.825901] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 839.826070] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 839.837823] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 839.838008] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 839.838211] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 839.838386] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68443) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 839.839483] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e10dfd7-b456-4d0b-b9de-2c4e7e0db862 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.848185] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57456c08-399a-419f-b009-74437972892b {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.862244] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d337f28-d81f-4fa2-82ce-a33a56cf1795 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.868863] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-3fca01f3-2b3c-4842-97c6-0ea432615f7d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.899064] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181013MB free_disk=105GB free_vcpus=48 pci_devices=None {{(pid=68443) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 839.899230] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 839.899387] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 839.972675] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 839.972836] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance cd131349-f678-4271-af79-456624f090d1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 839.972967] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance b0882dec-0d2a-4f62-933d-0d24f3340026 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 839.973156] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 839.973223] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance f5aa2b1b-c290-42f2-84d3-272415184f14 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 839.973340] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 280e1cc5-91db-4a03-bca4-b2d2e4ddd221 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 839.973458] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 8fdbd88f-f608-4f26-9076-7d2f6eb67224 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 839.973574] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 08a980e1-ca8e-4af3-afbf-bd688e11259f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 839.973689] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 63801b63-1601-4e77-a500-3569713177bd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 839.974021] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 6df57929-1115-4080-8131-8960525eb833 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 839.985971] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 844f2b9d-ad2a-431a-a587-65ba446d571f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 839.997078] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance cee290e6-66e3-4d2e-a9bb-f93db33eaaaa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 840.010277] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 88c5636b-e0d5-4db2-8044-aa909a1da0cc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 840.020546] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance ec17824f-18a5-4a44-8f64-33438ee4990d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 840.031131] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 12d34fba-743e-4f1c-aeaf-6914aa5788c1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 840.040544] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 5afa4386-7640-4a3d-bc2b-b8b5e9d28ebf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 840.050126] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 410678da-9177-4822-9d48-a94eeefcd22f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 840.061355] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 1c1675e9-0e4d-49d8-bb02-517b6021c35d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 840.072094] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 32a3402f-546a-4e3d-b71d-e4e3b50df6f0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 840.081339] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 96d4015d-e7a6-4fcc-8f73-afc928113cff has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 840.090611] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance b8f1485f-2f6a-416f-a285-07607283eb08 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 840.099508] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance b473237b-274d-450d-b5a1-63d78b08f6f0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 840.110900] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 1c1acc0d-263d-4687-93ff-291d18a592d8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 840.121078] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance b436b172-7da8-4753-9e5c-896675ae640c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 840.131046] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 0cf57946-5db0-4c7c-a537-3f35e50c231f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 840.140698] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 960a8177-a962-471b-850c-7fdf16544cbe has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 840.153831] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 0cab7c98-d6d4-401f-a8a3-6aeb7c4ae3e9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 840.163966] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance bc6ee2e0-ed65-4c30-bb37-28436f8d487d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 840.174445] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance a79985f5-4be3-4b95-a3d3-339b7f25b9e9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 840.184187] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 67780abc-a216-41d2-b531-d31aef150587 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 840.193848] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 840.194120] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 840.194296] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 840.557536] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-088b7a8c-7bee-4c0c-8609-9fd295605f6f {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.565418] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bb938c9-4aa0-41be-a9be-97c09e44140f {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.595927] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7368a2b-0319-4e79-9ed9-64af3c53b0cc {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.602774] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71948380-3766-4874-a657-df6e16824a3e {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.616219] env[68443]: DEBUG nova.compute.provider_tree [None 
req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 840.625846] env[68443]: DEBUG nova.scheduler.client.report [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 840.643081] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68443) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 840.643286] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.744s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 841.642423] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 853.816095] env[68443]: WARNING oslo_vmware.rw_handles [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 853.816095] env[68443]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 853.816095] env[68443]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 853.816095] env[68443]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 853.816095] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 853.816095] env[68443]: ERROR oslo_vmware.rw_handles response.begin() [ 853.816095] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 853.816095] env[68443]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 853.816095] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 853.816095] env[68443]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 853.816095] env[68443]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 853.816095] env[68443]: ERROR oslo_vmware.rw_handles [ 853.816737] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] [instance: 
cd131349-f678-4271-af79-456624f090d1] Downloaded image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to vmware_temp/b6fde654-9cba-4718-95ec-495ce1cf4551/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 853.818658] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] [instance: cd131349-f678-4271-af79-456624f090d1] Caching image {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 853.818918] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Copying Virtual Disk [datastore1] vmware_temp/b6fde654-9cba-4718-95ec-495ce1cf4551/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk to [datastore1] vmware_temp/b6fde654-9cba-4718-95ec-495ce1cf4551/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk {{(pid=68443) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 853.819518] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ec36322f-adb6-4e75-9528-ec6ffecddf63 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.829218] env[68443]: DEBUG oslo_vmware.api [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Waiting for the task: (returnval){ [ 853.829218] env[68443]: value = "task-3373917" [ 853.829218] env[68443]: _type = "Task" [ 853.829218] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.837244] env[68443]: DEBUG oslo_vmware.api [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Task: {'id': task-3373917, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.340319] env[68443]: DEBUG oslo_vmware.exceptions [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Fault InvalidArgument not matched. 
{{(pid=68443) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 854.340618] env[68443]: DEBUG oslo_concurrency.lockutils [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 854.341147] env[68443]: ERROR nova.compute.manager [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] [instance: cd131349-f678-4271-af79-456624f090d1] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 854.341147] env[68443]: Faults: ['InvalidArgument'] [ 854.341147] env[68443]: ERROR nova.compute.manager [instance: cd131349-f678-4271-af79-456624f090d1] Traceback (most recent call last): [ 854.341147] env[68443]: ERROR nova.compute.manager [instance: cd131349-f678-4271-af79-456624f090d1] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 854.341147] env[68443]: ERROR nova.compute.manager [instance: cd131349-f678-4271-af79-456624f090d1] yield resources [ 854.341147] env[68443]: ERROR nova.compute.manager [instance: cd131349-f678-4271-af79-456624f090d1] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 854.341147] env[68443]: ERROR nova.compute.manager [instance: cd131349-f678-4271-af79-456624f090d1] self.driver.spawn(context, instance, image_meta, [ 854.341147] env[68443]: ERROR nova.compute.manager [instance: cd131349-f678-4271-af79-456624f090d1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 854.341147] env[68443]: ERROR nova.compute.manager [instance: cd131349-f678-4271-af79-456624f090d1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 854.341147] env[68443]: ERROR nova.compute.manager [instance: cd131349-f678-4271-af79-456624f090d1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 854.341147] env[68443]: ERROR nova.compute.manager [instance: cd131349-f678-4271-af79-456624f090d1] self._fetch_image_if_missing(context, vi) [ 854.341147] env[68443]: ERROR nova.compute.manager [instance: cd131349-f678-4271-af79-456624f090d1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 854.341147] env[68443]: ERROR nova.compute.manager [instance: cd131349-f678-4271-af79-456624f090d1] image_cache(vi, tmp_image_ds_loc) [ 854.341446] env[68443]: ERROR nova.compute.manager [instance: cd131349-f678-4271-af79-456624f090d1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 854.341446] env[68443]: ERROR nova.compute.manager [instance: cd131349-f678-4271-af79-456624f090d1] vm_util.copy_virtual_disk( [ 854.341446] env[68443]: ERROR nova.compute.manager [instance: cd131349-f678-4271-af79-456624f090d1] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 854.341446] env[68443]: ERROR nova.compute.manager [instance: cd131349-f678-4271-af79-456624f090d1] session._wait_for_task(vmdk_copy_task) [ 854.341446] env[68443]: ERROR nova.compute.manager [instance: cd131349-f678-4271-af79-456624f090d1] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 854.341446] 
env[68443]: ERROR nova.compute.manager [instance: cd131349-f678-4271-af79-456624f090d1] return self.wait_for_task(task_ref) [ 854.341446] env[68443]: ERROR nova.compute.manager [instance: cd131349-f678-4271-af79-456624f090d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 854.341446] env[68443]: ERROR nova.compute.manager [instance: cd131349-f678-4271-af79-456624f090d1] return evt.wait() [ 854.341446] env[68443]: ERROR nova.compute.manager [instance: cd131349-f678-4271-af79-456624f090d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 854.341446] env[68443]: ERROR nova.compute.manager [instance: cd131349-f678-4271-af79-456624f090d1] result = hub.switch() [ 854.341446] env[68443]: ERROR nova.compute.manager [instance: cd131349-f678-4271-af79-456624f090d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 854.341446] env[68443]: ERROR nova.compute.manager [instance: cd131349-f678-4271-af79-456624f090d1] return self.greenlet.switch() [ 854.341446] env[68443]: ERROR nova.compute.manager [instance: cd131349-f678-4271-af79-456624f090d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 854.341753] env[68443]: ERROR nova.compute.manager [instance: cd131349-f678-4271-af79-456624f090d1] self.f(*self.args, **self.kw) [ 854.341753] env[68443]: ERROR nova.compute.manager [instance: cd131349-f678-4271-af79-456624f090d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 854.341753] env[68443]: ERROR nova.compute.manager [instance: cd131349-f678-4271-af79-456624f090d1] raise exceptions.translate_fault(task_info.error) [ 854.341753] env[68443]: ERROR nova.compute.manager [instance: cd131349-f678-4271-af79-456624f090d1] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 854.341753] env[68443]: ERROR nova.compute.manager [instance: cd131349-f678-4271-af79-456624f090d1] Faults: ['InvalidArgument'] [ 854.341753] env[68443]: ERROR nova.compute.manager [instance: cd131349-f678-4271-af79-456624f090d1] [ 854.341753] env[68443]: INFO nova.compute.manager [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] [instance: cd131349-f678-4271-af79-456624f090d1] Terminating instance [ 854.343175] env[68443]: DEBUG oslo_concurrency.lockutils [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 854.343354] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 854.344115] env[68443]: DEBUG nova.compute.manager [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] [instance: cd131349-f678-4271-af79-456624f090d1] Start destroying the instance on the hypervisor. 
{{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 854.344400] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] [instance: cd131349-f678-4271-af79-456624f090d1] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 854.344686] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-af885697-fe87-41df-95af-58dea10f767c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.347240] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8207594e-e621-4c9a-8afc-0316f16716d4 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.354183] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] [instance: cd131349-f678-4271-af79-456624f090d1] Unregistering the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 854.354414] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4f36cc23-02b9-4bac-8db4-8f285597619f {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.356882] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 854.357166] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68443) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 854.358270] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-51341e43-ff0b-4b68-897a-b26f4bba49c6 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.362934] env[68443]: DEBUG oslo_vmware.api [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Waiting for the task: (returnval){ [ 854.362934] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]52b02b48-e741-dcee-206b-381739d05cb3" [ 854.362934] env[68443]: _type = "Task" [ 854.362934] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.370019] env[68443]: DEBUG oslo_vmware.api [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]52b02b48-e741-dcee-206b-381739d05cb3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.439389] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] [instance: cd131349-f678-4271-af79-456624f090d1] Unregistered the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 854.439682] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] [instance: cd131349-f678-4271-af79-456624f090d1] Deleting contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 854.439944] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Deleting the datastore file [datastore1] cd131349-f678-4271-af79-456624f090d1 {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 854.441561] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1ca69362-99de-4e6e-bcc7-5592a4b9ea34 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.446263] env[68443]: DEBUG oslo_vmware.api [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Waiting for the task: (returnval){ [ 854.446263] env[68443]: value = "task-3373919" [ 854.446263] env[68443]: _type = "Task" [ 854.446263] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.458521] env[68443]: DEBUG oslo_vmware.api [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Task: {'id': task-3373919, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.873499] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] Preparing fetch location {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 854.873751] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Creating directory with path [datastore1] vmware_temp/0fdd7d02-9528-4d0b-9f50-52935090be3a/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 854.874009] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3da66d60-cac5-4a7a-85f0-2ab85c373e58 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.886400] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Created directory with path [datastore1] vmware_temp/0fdd7d02-9528-4d0b-9f50-52935090be3a/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 854.886595] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] Fetch image to [datastore1] vmware_temp/0fdd7d02-9528-4d0b-9f50-52935090be3a/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 854.886776] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to [datastore1] vmware_temp/0fdd7d02-9528-4d0b-9f50-52935090be3a/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 854.887525] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57e58b3c-4c18-4e58-89e7-b75032e6fdac {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.893879] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f644322d-94bd-4a6a-85fb-0d4d974a90b7 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.902692] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e7a929a-8eae-46f8-8399-ca52cbb2480b {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.932134] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57d6fad3-4b02-4196-a992-70c29f0776fd {{(pid=68443) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.937690] env[68443]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-69758582-916c-457c-b0fb-0c92db37e09d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.953693] env[68443]: DEBUG oslo_vmware.api [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Task: {'id': task-3373919, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.071019} completed successfully. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.953956] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Deleted the datastore file {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 854.954159] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] [instance: cd131349-f678-4271-af79-456624f090d1] Deleted contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 854.954330] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] [instance: cd131349-f678-4271-af79-456624f090d1] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 854.954502] env[68443]: INFO nova.compute.manager [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] [instance: cd131349-f678-4271-af79-456624f090d1] Took 0.61 seconds to destroy the instance on the hypervisor. 
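Editor's note, a sketch only (not Nova's actual code): the traceback at 854.341 and the "Task: {'id': task-3373917, 'name': CopyVirtualDisk_Task} progress is 0%" lines show the spawn path issuing CopyVirtualDisk_Task against vCenter and blocking on it through oslo.vmware's task polling, with the InvalidArgument/fileType fault surfacing as a VimFaultException. The function below mirrors that pattern under stated assumptions: an already-created oslo_vmware.api.VMwareAPISession (`session`) plus managed object references for the VirtualDiskManager and datacenter; the argument names are illustrative.

    # Illustrative copy-and-wait pattern, assuming `session` is an
    # oslo_vmware.api.VMwareAPISession and the refs/paths come from the caller.
    from oslo_vmware import exceptions as vexc

    def copy_cached_image(session, virtual_disk_manager, datacenter_ref,
                          source_path, dest_path):
        """Start a vCenter CopyVirtualDisk_Task and block until it finishes."""
        # invoke_api() sends the SOAP call (the "Invoking
        # VirtualDiskManager.CopyVirtualDisk_Task" line) and returns a task ref.
        task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task',
                                  virtual_disk_manager,
                                  sourceName=source_path,
                                  sourceDatacenter=datacenter_ref,
                                  destName=dest_path,
                                  destDatacenter=datacenter_ref)
        try:
            # wait_for_task() polls the task (the repeated _poll_task lines) and
            # raises a translated fault if the task ends in an error state.
            return session.wait_for_task(task)
        except vexc.VimFaultException:
            # Path taken at 854.341: the task failed with InvalidArgument
            # ("A specified parameter was not correct: fileType"), so the
            # caller destroys the half-built instance and re-schedules it.
            raise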
[ 854.956568] env[68443]: DEBUG nova.compute.claims [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] [instance: cd131349-f678-4271-af79-456624f090d1] Aborting claim: {{(pid=68443) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 854.956734] env[68443]: DEBUG oslo_concurrency.lockutils [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 854.956981] env[68443]: DEBUG oslo_concurrency.lockutils [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 854.961744] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 855.039323] env[68443]: DEBUG oslo_vmware.rw_handles [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0fdd7d02-9528-4d0b-9f50-52935090be3a/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68443) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 855.103683] env[68443]: DEBUG oslo_vmware.rw_handles [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Completed reading data from the image iterator. {{(pid=68443) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 855.103873] env[68443]: DEBUG oslo_vmware.rw_handles [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0fdd7d02-9528-4d0b-9f50-52935090be3a/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68443) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 855.439460] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33ab9943-720f-4ba5-963b-ebcebc4ab648 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.447148] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e094d0ca-c2d0-43c3-8e1f-54e2a3743870 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.476697] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dd57175-95d4-4b75-a8ea-03de97a7ffc8 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.483548] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5228d764-05e2-4512-98d9-6873c229ad57 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.496143] env[68443]: DEBUG nova.compute.provider_tree [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 855.504587] env[68443]: DEBUG nova.scheduler.client.report [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 855.517714] env[68443]: DEBUG oslo_concurrency.lockutils [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.561s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 855.518241] env[68443]: ERROR nova.compute.manager [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] [instance: cd131349-f678-4271-af79-456624f090d1] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 855.518241] env[68443]: Faults: ['InvalidArgument'] [ 855.518241] env[68443]: ERROR nova.compute.manager [instance: cd131349-f678-4271-af79-456624f090d1] Traceback (most recent call last): [ 855.518241] env[68443]: ERROR nova.compute.manager [instance: cd131349-f678-4271-af79-456624f090d1] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 855.518241] env[68443]: ERROR nova.compute.manager [instance: cd131349-f678-4271-af79-456624f090d1] 
self.driver.spawn(context, instance, image_meta, [ 855.518241] env[68443]: ERROR nova.compute.manager [instance: cd131349-f678-4271-af79-456624f090d1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 855.518241] env[68443]: ERROR nova.compute.manager [instance: cd131349-f678-4271-af79-456624f090d1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 855.518241] env[68443]: ERROR nova.compute.manager [instance: cd131349-f678-4271-af79-456624f090d1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 855.518241] env[68443]: ERROR nova.compute.manager [instance: cd131349-f678-4271-af79-456624f090d1] self._fetch_image_if_missing(context, vi) [ 855.518241] env[68443]: ERROR nova.compute.manager [instance: cd131349-f678-4271-af79-456624f090d1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 855.518241] env[68443]: ERROR nova.compute.manager [instance: cd131349-f678-4271-af79-456624f090d1] image_cache(vi, tmp_image_ds_loc) [ 855.518241] env[68443]: ERROR nova.compute.manager [instance: cd131349-f678-4271-af79-456624f090d1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 855.518543] env[68443]: ERROR nova.compute.manager [instance: cd131349-f678-4271-af79-456624f090d1] vm_util.copy_virtual_disk( [ 855.518543] env[68443]: ERROR nova.compute.manager [instance: cd131349-f678-4271-af79-456624f090d1] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 855.518543] env[68443]: ERROR nova.compute.manager [instance: cd131349-f678-4271-af79-456624f090d1] session._wait_for_task(vmdk_copy_task) [ 855.518543] env[68443]: ERROR nova.compute.manager [instance: cd131349-f678-4271-af79-456624f090d1] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 855.518543] env[68443]: ERROR nova.compute.manager [instance: cd131349-f678-4271-af79-456624f090d1] return self.wait_for_task(task_ref) [ 855.518543] env[68443]: ERROR nova.compute.manager [instance: cd131349-f678-4271-af79-456624f090d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 855.518543] env[68443]: ERROR nova.compute.manager [instance: cd131349-f678-4271-af79-456624f090d1] return evt.wait() [ 855.518543] env[68443]: ERROR nova.compute.manager [instance: cd131349-f678-4271-af79-456624f090d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 855.518543] env[68443]: ERROR nova.compute.manager [instance: cd131349-f678-4271-af79-456624f090d1] result = hub.switch() [ 855.518543] env[68443]: ERROR nova.compute.manager [instance: cd131349-f678-4271-af79-456624f090d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 855.518543] env[68443]: ERROR nova.compute.manager [instance: cd131349-f678-4271-af79-456624f090d1] return self.greenlet.switch() [ 855.518543] env[68443]: ERROR nova.compute.manager [instance: cd131349-f678-4271-af79-456624f090d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 855.518543] env[68443]: ERROR nova.compute.manager [instance: cd131349-f678-4271-af79-456624f090d1] self.f(*self.args, **self.kw) [ 855.518824] env[68443]: ERROR nova.compute.manager [instance: cd131349-f678-4271-af79-456624f090d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 855.518824] 
env[68443]: ERROR nova.compute.manager [instance: cd131349-f678-4271-af79-456624f090d1] raise exceptions.translate_fault(task_info.error) [ 855.518824] env[68443]: ERROR nova.compute.manager [instance: cd131349-f678-4271-af79-456624f090d1] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 855.518824] env[68443]: ERROR nova.compute.manager [instance: cd131349-f678-4271-af79-456624f090d1] Faults: ['InvalidArgument'] [ 855.518824] env[68443]: ERROR nova.compute.manager [instance: cd131349-f678-4271-af79-456624f090d1] [ 855.518937] env[68443]: DEBUG nova.compute.utils [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] [instance: cd131349-f678-4271-af79-456624f090d1] VimFaultException {{(pid=68443) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 855.520469] env[68443]: DEBUG nova.compute.manager [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] [instance: cd131349-f678-4271-af79-456624f090d1] Build of instance cd131349-f678-4271-af79-456624f090d1 was re-scheduled: A specified parameter was not correct: fileType [ 855.520469] env[68443]: Faults: ['InvalidArgument'] {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 855.520847] env[68443]: DEBUG nova.compute.manager [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] [instance: cd131349-f678-4271-af79-456624f090d1] Unplugging VIFs for instance {{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 855.521032] env[68443]: DEBUG nova.compute.manager [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 855.521213] env[68443]: DEBUG nova.compute.manager [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] [instance: cd131349-f678-4271-af79-456624f090d1] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 855.521379] env[68443]: DEBUG nova.network.neutron [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] [instance: cd131349-f678-4271-af79-456624f090d1] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 855.848197] env[68443]: DEBUG nova.network.neutron [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] [instance: cd131349-f678-4271-af79-456624f090d1] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 855.858372] env[68443]: INFO nova.compute.manager [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] [instance: cd131349-f678-4271-af79-456624f090d1] Took 0.34 seconds to deallocate network for instance. 
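Editor's note, a sketch only: the recurring "Lock 'compute_resources' acquired by ... :: waited" / "released ... :: held" lines (e.g. 854.956 and 855.517) are emitted by oslo.concurrency's synchronized wrapper around the resource tracker's claim and abort methods, which is why the failed spawn's claim rollback and the periodic _update_available_resource never interleave. A minimal equivalent, using a hypothetical tracker class rather than Nova's ResourceTracker:

    # Minimal illustration of the locking pattern behind the lockutils lines.
    from oslo_concurrency import lockutils

    class MiniResourceTracker(object):
        def __init__(self):
            self.used_vcpus = 0

        @lockutils.synchronized('compute_resources')
        def instance_claim(self, vcpus):
            # Runs with the in-process "compute_resources" lock held.
            self.used_vcpus += vcpus

        @lockutils.synchronized('compute_resources')
        def abort_instance_claim(self, vcpus):
            # Corresponds to the rollback logged after the failed spawn,
            # just before the build is re-scheduled at 855.520.
            self.used_vcpus -= vcpus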
[ 855.952295] env[68443]: INFO nova.scheduler.client.report [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Deleted allocations for instance cd131349-f678-4271-af79-456624f090d1 [ 855.974862] env[68443]: DEBUG oslo_concurrency.lockutils [None req-fd9d9d34-b204-4bfd-9e50-0c73cfd08c5c tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Lock "cd131349-f678-4271-af79-456624f090d1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 148.223s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 855.992102] env[68443]: DEBUG nova.compute.manager [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 856.046471] env[68443]: DEBUG oslo_concurrency.lockutils [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 856.046721] env[68443]: DEBUG oslo_concurrency.lockutils [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 856.048219] env[68443]: INFO nova.compute.claims [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 856.449345] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3bc03de-8a90-4b54-9057-4f4c5c3e8fa1 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.457455] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dc62d45-c7c2-43d9-b11b-ef2c91fce9ac {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.487666] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7cd723a-316a-4b71-9224-67d1c83a8e6d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.494756] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff2a00db-bd99-4b6f-9391-54199d0a1383 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.507525] env[68443]: DEBUG nova.compute.provider_tree [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 
tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 856.518333] env[68443]: DEBUG nova.scheduler.client.report [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 856.530462] env[68443]: DEBUG oslo_concurrency.lockutils [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.484s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 856.530936] env[68443]: DEBUG nova.compute.manager [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Start building networks asynchronously for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 856.567051] env[68443]: DEBUG nova.compute.utils [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Using /dev/sd instead of None {{(pid=68443) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 856.567051] env[68443]: DEBUG nova.compute.manager [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Allocating IP information in the background. {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 856.567051] env[68443]: DEBUG nova.network.neutron [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] allocate_for_instance() {{(pid=68443) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 856.580517] env[68443]: DEBUG nova.compute.manager [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Start building block device mappings for instance. 
{{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 856.633622] env[68443]: DEBUG nova.policy [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'eec524e0a2a440c88ea79871a92b85cc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fec80bc424c94adf9cd34ef8ec90da1a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68443) authorize /opt/stack/nova/nova/policy.py:203}} [ 856.653408] env[68443]: DEBUG nova.compute.manager [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Start spawning the instance on the hypervisor. {{(pid=68443) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 856.682525] env[68443]: DEBUG nova.virt.hardware [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-08T18:48:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-08T18:48:12Z,direct_url=,disk_format='vmdk',id=a80a8b97-4d56-4702-9b02-9d115fcd6710,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f4f2a1e220914ec3b281775c224df247',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-08T18:48:13Z,virtual_size=,visibility=), allow threads: False {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 856.682748] env[68443]: DEBUG nova.virt.hardware [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Flavor limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 856.682928] env[68443]: DEBUG nova.virt.hardware [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Image limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 856.683134] env[68443]: DEBUG nova.virt.hardware [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Flavor pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 856.683282] env[68443]: DEBUG nova.virt.hardware [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Image pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 856.683426] env[68443]: DEBUG nova.virt.hardware [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 856.683627] env[68443]: DEBUG nova.virt.hardware [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 856.683780] env[68443]: DEBUG nova.virt.hardware [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 856.683940] env[68443]: DEBUG nova.virt.hardware [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Got 1 possible topologies {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 856.684114] env[68443]: DEBUG nova.virt.hardware [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 856.684285] env[68443]: DEBUG nova.virt.hardware [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 856.685141] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8fb6460-7e44-4e8f-898e-ee55d5ffeccf {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.693355] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a06fef0-70a6-4539-908f-5507418b3f77 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.945144] env[68443]: DEBUG nova.network.neutron [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Successfully created port: b6a3b2a7-2915-4601-8e4c-a031238d18b0 {{(pid=68443) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 857.685961] env[68443]: DEBUG nova.network.neutron [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Successfully updated port: b6a3b2a7-2915-4601-8e4c-a031238d18b0 
{{(pid=68443) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 857.696438] env[68443]: DEBUG oslo_concurrency.lockutils [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Acquiring lock "refresh_cache-844f2b9d-ad2a-431a-a587-65ba446d571f" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 857.696776] env[68443]: DEBUG oslo_concurrency.lockutils [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Acquired lock "refresh_cache-844f2b9d-ad2a-431a-a587-65ba446d571f" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 857.696776] env[68443]: DEBUG nova.network.neutron [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Building network info cache for instance {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 857.736531] env[68443]: DEBUG nova.network.neutron [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Instance cache missing network info. {{(pid=68443) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 857.913442] env[68443]: DEBUG nova.network.neutron [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Updating instance_info_cache with network_info: [{"id": "b6a3b2a7-2915-4601-8e4c-a031238d18b0", "address": "fa:16:3e:b2:2f:79", "network": {"id": "3cdad5f0-55e4-4e49-93bf-0292762a25dc", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2072502436-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fec80bc424c94adf9cd34ef8ec90da1a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ed82fc44-f216-40a7-a68b-62d76ffdb1f8", "external-id": "nsx-vlan-transportzone-236", "segmentation_id": 236, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb6a3b2a7-29", "ovs_interfaceid": "b6a3b2a7-2915-4601-8e4c-a031238d18b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 857.927829] env[68443]: DEBUG oslo_concurrency.lockutils [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Releasing lock "refresh_cache-844f2b9d-ad2a-431a-a587-65ba446d571f" {{(pid=68443) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 857.928147] env[68443]: DEBUG nova.compute.manager [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Instance network_info: |[{"id": "b6a3b2a7-2915-4601-8e4c-a031238d18b0", "address": "fa:16:3e:b2:2f:79", "network": {"id": "3cdad5f0-55e4-4e49-93bf-0292762a25dc", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2072502436-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fec80bc424c94adf9cd34ef8ec90da1a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ed82fc44-f216-40a7-a68b-62d76ffdb1f8", "external-id": "nsx-vlan-transportzone-236", "segmentation_id": 236, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb6a3b2a7-29", "ovs_interfaceid": "b6a3b2a7-2915-4601-8e4c-a031238d18b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 857.928551] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b2:2f:79', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ed82fc44-f216-40a7-a68b-62d76ffdb1f8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b6a3b2a7-2915-4601-8e4c-a031238d18b0', 'vif_model': 'vmxnet3'}] {{(pid=68443) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 857.936257] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Creating folder: Project (fec80bc424c94adf9cd34ef8ec90da1a). Parent ref: group-v673136. {{(pid=68443) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 857.936804] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9e4f809f-5b8c-4e25-acd1-2292b61e2064 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.948481] env[68443]: INFO nova.virt.vmwareapi.vm_util [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Created folder: Project (fec80bc424c94adf9cd34ef8ec90da1a) in parent group-v673136. [ 857.948671] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Creating folder: Instances. Parent ref: group-v673173. 
{{(pid=68443) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 857.948896] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9087a921-994a-4d7f-b6cd-74a82ca090c2 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.958170] env[68443]: INFO nova.virt.vmwareapi.vm_util [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Created folder: Instances in parent group-v673173. [ 857.958402] env[68443]: DEBUG oslo.service.loopingcall [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 857.959249] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Creating VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 857.959249] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cccfb7c7-d1f6-42bf-b4b0-b412ab34e543 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.978159] env[68443]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 857.978159] env[68443]: value = "task-3373922" [ 857.978159] env[68443]: _type = "Task" [ 857.978159] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.988319] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3373922, 'name': CreateVM_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.083470] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4371b7a8-64b0-4bae-af4b-23156b161e8e tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Acquiring lock "aabad6b5-1bf9-44ed-8fee-60a06c46f890" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 858.083711] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4371b7a8-64b0-4bae-af4b-23156b161e8e tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Lock "aabad6b5-1bf9-44ed-8fee-60a06c46f890" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 858.488486] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3373922, 'name': CreateVM_Task, 'duration_secs': 0.317353} completed successfully. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.488658] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Created VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 858.489344] env[68443]: DEBUG oslo_concurrency.lockutils [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 858.489507] env[68443]: DEBUG oslo_concurrency.lockutils [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 858.489808] env[68443]: DEBUG oslo_concurrency.lockutils [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 858.490063] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-051c5588-0c0d-40be-8dab-0dcdfd76e9bd {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.494465] env[68443]: DEBUG oslo_vmware.api [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Waiting for the task: (returnval){ [ 858.494465] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]52c3e017-09d6-2c46-c2b3-fbc9e81a32b6" [ 858.494465] env[68443]: _type = "Task" [ 858.494465] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.501610] env[68443]: DEBUG oslo_vmware.api [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]52c3e017-09d6-2c46-c2b3-fbc9e81a32b6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.590906] env[68443]: DEBUG nova.compute.manager [req-a26ecc50-bb2c-48a1-bd99-d68b0270756e req-a5271035-0db5-4f3b-b2cb-ef9fdefbb834 service nova] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Received event network-vif-plugged-b6a3b2a7-2915-4601-8e4c-a031238d18b0 {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 858.591166] env[68443]: DEBUG oslo_concurrency.lockutils [req-a26ecc50-bb2c-48a1-bd99-d68b0270756e req-a5271035-0db5-4f3b-b2cb-ef9fdefbb834 service nova] Acquiring lock "844f2b9d-ad2a-431a-a587-65ba446d571f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 858.591378] env[68443]: DEBUG oslo_concurrency.lockutils [req-a26ecc50-bb2c-48a1-bd99-d68b0270756e req-a5271035-0db5-4f3b-b2cb-ef9fdefbb834 service nova] Lock "844f2b9d-ad2a-431a-a587-65ba446d571f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 858.591578] env[68443]: DEBUG oslo_concurrency.lockutils [req-a26ecc50-bb2c-48a1-bd99-d68b0270756e req-a5271035-0db5-4f3b-b2cb-ef9fdefbb834 service nova] Lock "844f2b9d-ad2a-431a-a587-65ba446d571f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 858.591786] env[68443]: DEBUG nova.compute.manager [req-a26ecc50-bb2c-48a1-bd99-d68b0270756e req-a5271035-0db5-4f3b-b2cb-ef9fdefbb834 service nova] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] No waiting events found dispatching network-vif-plugged-b6a3b2a7-2915-4601-8e4c-a031238d18b0 {{(pid=68443) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 858.592041] env[68443]: WARNING nova.compute.manager [req-a26ecc50-bb2c-48a1-bd99-d68b0270756e req-a5271035-0db5-4f3b-b2cb-ef9fdefbb834 service nova] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Received unexpected event network-vif-plugged-b6a3b2a7-2915-4601-8e4c-a031238d18b0 for instance with vm_state building and task_state spawning. [ 858.592220] env[68443]: DEBUG nova.compute.manager [req-a26ecc50-bb2c-48a1-bd99-d68b0270756e req-a5271035-0db5-4f3b-b2cb-ef9fdefbb834 service nova] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Received event network-changed-b6a3b2a7-2915-4601-8e4c-a031238d18b0 {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 858.592379] env[68443]: DEBUG nova.compute.manager [req-a26ecc50-bb2c-48a1-bd99-d68b0270756e req-a5271035-0db5-4f3b-b2cb-ef9fdefbb834 service nova] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Refreshing instance network info cache due to event network-changed-b6a3b2a7-2915-4601-8e4c-a031238d18b0. 
{{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 858.592563] env[68443]: DEBUG oslo_concurrency.lockutils [req-a26ecc50-bb2c-48a1-bd99-d68b0270756e req-a5271035-0db5-4f3b-b2cb-ef9fdefbb834 service nova] Acquiring lock "refresh_cache-844f2b9d-ad2a-431a-a587-65ba446d571f" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 858.592768] env[68443]: DEBUG oslo_concurrency.lockutils [req-a26ecc50-bb2c-48a1-bd99-d68b0270756e req-a5271035-0db5-4f3b-b2cb-ef9fdefbb834 service nova] Acquired lock "refresh_cache-844f2b9d-ad2a-431a-a587-65ba446d571f" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 858.592962] env[68443]: DEBUG nova.network.neutron [req-a26ecc50-bb2c-48a1-bd99-d68b0270756e req-a5271035-0db5-4f3b-b2cb-ef9fdefbb834 service nova] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Refreshing network info cache for port b6a3b2a7-2915-4601-8e4c-a031238d18b0 {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 858.855976] env[68443]: DEBUG nova.network.neutron [req-a26ecc50-bb2c-48a1-bd99-d68b0270756e req-a5271035-0db5-4f3b-b2cb-ef9fdefbb834 service nova] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Updated VIF entry in instance network info cache for port b6a3b2a7-2915-4601-8e4c-a031238d18b0. {{(pid=68443) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 858.856348] env[68443]: DEBUG nova.network.neutron [req-a26ecc50-bb2c-48a1-bd99-d68b0270756e req-a5271035-0db5-4f3b-b2cb-ef9fdefbb834 service nova] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Updating instance_info_cache with network_info: [{"id": "b6a3b2a7-2915-4601-8e4c-a031238d18b0", "address": "fa:16:3e:b2:2f:79", "network": {"id": "3cdad5f0-55e4-4e49-93bf-0292762a25dc", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2072502436-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fec80bc424c94adf9cd34ef8ec90da1a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ed82fc44-f216-40a7-a68b-62d76ffdb1f8", "external-id": "nsx-vlan-transportzone-236", "segmentation_id": 236, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb6a3b2a7-29", "ovs_interfaceid": "b6a3b2a7-2915-4601-8e4c-a031238d18b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 858.868449] env[68443]: DEBUG oslo_concurrency.lockutils [req-a26ecc50-bb2c-48a1-bd99-d68b0270756e req-a5271035-0db5-4f3b-b2cb-ef9fdefbb834 service nova] Releasing lock "refresh_cache-844f2b9d-ad2a-431a-a587-65ba446d571f" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 859.005093] env[68443]: DEBUG oslo_concurrency.lockutils [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 
tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 859.005226] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Processing image a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 859.005392] env[68443]: DEBUG oslo_concurrency.lockutils [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 897.825300] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 898.825592] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 898.825866] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 898.825975] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68443) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 899.821566] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 899.844832] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 900.825381] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 900.825555] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Starting heal instance info cache {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 900.825692] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Rebuilding the list of instances to heal {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 900.846255] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 900.846548] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 900.846548] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 900.846672] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 900.846796] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 900.846919] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Skipping network cache update for instance because it is Building. 
{{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 900.847613] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 900.847613] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 63801b63-1601-4e77-a500-3569713177bd] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 900.847613] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 6df57929-1115-4080-8131-8960525eb833] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 900.847613] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 900.847613] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Didn't find any instances for network info cache update. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 900.848088] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 901.824531] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 901.824751] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 901.837798] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 901.837951] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 901.838134] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 901.838292] env[68443]: 
DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68443) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 901.839404] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-818f6e6d-d22c-42d5-9450-b38506985987 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.848192] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4985f43d-16f7-4c00-953f-8f12b62eb259 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.861956] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fc77b84-cb28-4f46-a49a-a36dfb6a7a45 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.868273] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-207757d3-9d3f-4930-b9d9-d1a3e0ed746f {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.897367] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180981MB free_disk=105GB free_vcpus=48 pci_devices=None {{(pid=68443) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 901.897484] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 901.897683] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 901.974806] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 901.974970] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance b0882dec-0d2a-4f62-933d-0d24f3340026 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 901.975107] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 901.975231] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance f5aa2b1b-c290-42f2-84d3-272415184f14 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 901.975349] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 280e1cc5-91db-4a03-bca4-b2d2e4ddd221 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 901.975463] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 8fdbd88f-f608-4f26-9076-7d2f6eb67224 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 901.975576] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 08a980e1-ca8e-4af3-afbf-bd688e11259f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 901.975689] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 63801b63-1601-4e77-a500-3569713177bd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 901.975803] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 6df57929-1115-4080-8131-8960525eb833 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 901.975916] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 844f2b9d-ad2a-431a-a587-65ba446d571f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 901.988221] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance cee290e6-66e3-4d2e-a9bb-f93db33eaaaa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 901.998657] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 88c5636b-e0d5-4db2-8044-aa909a1da0cc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 902.009494] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance ec17824f-18a5-4a44-8f64-33438ee4990d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 902.018721] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 12d34fba-743e-4f1c-aeaf-6914aa5788c1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 902.027981] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 5afa4386-7640-4a3d-bc2b-b8b5e9d28ebf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 902.038932] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 410678da-9177-4822-9d48-a94eeefcd22f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 902.050874] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 1c1675e9-0e4d-49d8-bb02-517b6021c35d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 902.060317] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 32a3402f-546a-4e3d-b71d-e4e3b50df6f0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 902.070996] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 96d4015d-e7a6-4fcc-8f73-afc928113cff has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 902.079754] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance b8f1485f-2f6a-416f-a285-07607283eb08 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 902.089794] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance b473237b-274d-450d-b5a1-63d78b08f6f0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 902.099669] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 1c1acc0d-263d-4687-93ff-291d18a592d8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 902.109800] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance b436b172-7da8-4753-9e5c-896675ae640c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 902.119860] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 0cf57946-5db0-4c7c-a537-3f35e50c231f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 902.129771] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 960a8177-a962-471b-850c-7fdf16544cbe has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 902.139316] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 0cab7c98-d6d4-401f-a8a3-6aeb7c4ae3e9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 902.148451] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance bc6ee2e0-ed65-4c30-bb37-28436f8d487d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 902.158538] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance a79985f5-4be3-4b95-a3d3-339b7f25b9e9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 902.170023] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 67780abc-a216-41d2-b531-d31aef150587 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 902.180625] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 902.190092] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance aabad6b5-1bf9-44ed-8fee-60a06c46f890 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 902.190337] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 902.191158] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 902.528016] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9acfb9ce-904f-4264-ad52-d3f9c48b2bc4 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.534218] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3786b313-ff91-47c3-9227-3fec1a8906a4 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.568559] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a043f1c-15e1-4e28-8a96-879a26976d52 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.576142] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64a9d536-2ada-4721-991c-7b5a8de057e1 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.589117] env[68443]: DEBUG nova.compute.provider_tree [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 902.597411] env[68443]: DEBUG nova.scheduler.client.report [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 902.602733] env[68443]: WARNING oslo_vmware.rw_handles [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 902.602733] env[68443]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 902.602733] env[68443]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 902.602733] env[68443]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 902.602733] env[68443]: ERROR oslo_vmware.rw_handles File 
"/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 902.602733] env[68443]: ERROR oslo_vmware.rw_handles response.begin() [ 902.602733] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 902.602733] env[68443]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 902.602733] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 902.602733] env[68443]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 902.602733] env[68443]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 902.602733] env[68443]: ERROR oslo_vmware.rw_handles [ 902.603413] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] Downloaded image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to vmware_temp/0fdd7d02-9528-4d0b-9f50-52935090be3a/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 902.605371] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] Caching image {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 902.605711] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Copying Virtual Disk [datastore1] vmware_temp/0fdd7d02-9528-4d0b-9f50-52935090be3a/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk to [datastore1] vmware_temp/0fdd7d02-9528-4d0b-9f50-52935090be3a/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk {{(pid=68443) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 902.606294] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2b855b0a-d7b2-4e09-99e9-ea8b25ad9d2e {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.610998] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68443) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 902.611316] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.714s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 902.613384] env[68443]: DEBUG oslo_vmware.api [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Waiting for the task: (returnval){ [ 902.613384] env[68443]: value = "task-3373923" [ 902.613384] env[68443]: _type = "Task" [ 902.613384] env[68443]: } to complete. 
{{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.621977] env[68443]: DEBUG oslo_vmware.api [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Task: {'id': task-3373923, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.124828] env[68443]: DEBUG oslo_vmware.exceptions [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Fault InvalidArgument not matched. {{(pid=68443) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 903.125171] env[68443]: DEBUG oslo_concurrency.lockutils [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 903.125669] env[68443]: ERROR nova.compute.manager [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 903.125669] env[68443]: Faults: ['InvalidArgument'] [ 903.125669] env[68443]: ERROR nova.compute.manager [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] Traceback (most recent call last): [ 903.125669] env[68443]: ERROR nova.compute.manager [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 903.125669] env[68443]: ERROR nova.compute.manager [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] yield resources [ 903.125669] env[68443]: ERROR nova.compute.manager [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 903.125669] env[68443]: ERROR nova.compute.manager [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] self.driver.spawn(context, instance, image_meta, [ 903.125669] env[68443]: ERROR nova.compute.manager [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 903.125669] env[68443]: ERROR nova.compute.manager [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] self._vmops.spawn(context, instance, image_meta, injected_files, [ 903.125669] env[68443]: ERROR nova.compute.manager [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 903.125669] env[68443]: ERROR nova.compute.manager [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] self._fetch_image_if_missing(context, vi) [ 903.125669] env[68443]: ERROR nova.compute.manager [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 903.125984] env[68443]: ERROR nova.compute.manager [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] image_cache(vi, tmp_image_ds_loc) [ 903.125984] env[68443]: ERROR nova.compute.manager [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 903.125984] env[68443]: ERROR nova.compute.manager [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] vm_util.copy_virtual_disk( [ 903.125984] env[68443]: ERROR nova.compute.manager [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 903.125984] env[68443]: ERROR nova.compute.manager [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] session._wait_for_task(vmdk_copy_task) [ 903.125984] env[68443]: ERROR nova.compute.manager [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 903.125984] env[68443]: ERROR nova.compute.manager [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] return self.wait_for_task(task_ref) [ 903.125984] env[68443]: ERROR nova.compute.manager [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 903.125984] env[68443]: ERROR nova.compute.manager [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] return evt.wait() [ 903.125984] env[68443]: ERROR nova.compute.manager [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 903.125984] env[68443]: ERROR nova.compute.manager [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] result = hub.switch() [ 903.125984] env[68443]: ERROR nova.compute.manager [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 903.125984] env[68443]: ERROR nova.compute.manager [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] return self.greenlet.switch() [ 903.126295] env[68443]: ERROR nova.compute.manager [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 903.126295] env[68443]: ERROR nova.compute.manager [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] self.f(*self.args, **self.kw) [ 903.126295] env[68443]: ERROR nova.compute.manager [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 903.126295] env[68443]: ERROR nova.compute.manager [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] raise exceptions.translate_fault(task_info.error) [ 903.126295] env[68443]: ERROR nova.compute.manager [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 903.126295] env[68443]: ERROR nova.compute.manager [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] Faults: ['InvalidArgument'] [ 903.126295] env[68443]: ERROR nova.compute.manager [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] [ 903.126295] env[68443]: INFO nova.compute.manager [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] Terminating instance [ 903.127571] env[68443]: DEBUG oslo_concurrency.lockutils [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 903.127777] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 903.128620] env[68443]: DEBUG nova.compute.manager [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 903.128816] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 903.129051] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5a61ebff-a549-4432-8ed1-d8dc613c125b {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.131463] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dbdcd1e-5be0-4065-9fa0-712e81a376c1 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.138277] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] Unregistering the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 903.138500] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1d070ced-7a29-43b4-bb3b-e0a1456b9ff0 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.140613] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 903.140790] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68443) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 903.141727] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8297397a-ec9f-4176-901a-a1f4e27a6f51 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.147254] env[68443]: DEBUG oslo_vmware.api [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Waiting for the task: (returnval){ [ 903.147254] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]520c9ed4-4ab5-3b1a-5fc7-738dcc08bb14" [ 903.147254] env[68443]: _type = "Task" [ 903.147254] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.153777] env[68443]: DEBUG oslo_vmware.api [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]520c9ed4-4ab5-3b1a-5fc7-738dcc08bb14, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.209391] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] Unregistered the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 903.209634] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] Deleting contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 903.209848] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Deleting the datastore file [datastore1] 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 903.210148] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-34343521-3587-40df-8f74-2213a97f5ac8 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.216706] env[68443]: DEBUG oslo_vmware.api [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Waiting for the task: (returnval){ [ 903.216706] env[68443]: value = "task-3373925" [ 903.216706] env[68443]: _type = "Task" [ 903.216706] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.224689] env[68443]: DEBUG oslo_vmware.api [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Task: {'id': task-3373925, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.611641] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 903.656723] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Preparing fetch location {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 903.656967] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Creating directory with path [datastore1] vmware_temp/bd23e8f5-b880-4619-bedf-3e86911a3f61/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 903.657212] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c07cbd8f-bde7-4fb0-8456-627d441c8de0 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.669146] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Created directory with path [datastore1] vmware_temp/bd23e8f5-b880-4619-bedf-3e86911a3f61/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 903.669687] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Fetch image to [datastore1] vmware_temp/bd23e8f5-b880-4619-bedf-3e86911a3f61/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 903.669687] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to [datastore1] vmware_temp/bd23e8f5-b880-4619-bedf-3e86911a3f61/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 903.670229] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de07ef38-2a73-4113-9404-bce8ba7e5bca {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.676838] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff3e26b3-1aec-42e5-abd3-3205bc687b10 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.686563] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-366a2425-94c9-4652-bab5-d2762bd1501e {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.721413] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ac6ace8-2fc5-4d1f-99ec-25d2f1bc0f21 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.732811] env[68443]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-8bf1c39f-a266-43df-8ec2-068b39dd61d1 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.734577] env[68443]: DEBUG oslo_vmware.api [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Task: {'id': task-3373925, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.07891} completed successfully. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.734817] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Deleted the datastore file {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 903.734992] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] Deleted contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 903.735971] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 903.736195] env[68443]: INFO nova.compute.manager [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 903.738546] env[68443]: DEBUG nova.compute.claims [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] Aborting claim: {{(pid=68443) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 903.738727] env[68443]: DEBUG oslo_concurrency.lockutils [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 903.738937] env[68443]: DEBUG oslo_concurrency.lockutils [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 903.761574] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 903.926794] env[68443]: DEBUG oslo_vmware.rw_handles [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/bd23e8f5-b880-4619-bedf-3e86911a3f61/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68443) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 903.991976] env[68443]: DEBUG oslo_vmware.rw_handles [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Completed reading data from the image iterator. {{(pid=68443) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 903.992171] env[68443]: DEBUG oslo_vmware.rw_handles [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/bd23e8f5-b880-4619-bedf-3e86911a3f61/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68443) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 904.248704] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb3c43b3-551a-48ee-91fa-5586a361010a {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.255645] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fa1cad1-4a66-4e84-97bf-38c0155527ae {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.287675] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88ce9264-0fe6-44ba-bd7b-c26a673fd166 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.296045] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04cc9e8f-eca4-4f19-9092-ac01a089d14d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.310517] env[68443]: DEBUG nova.compute.provider_tree [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 904.319725] env[68443]: DEBUG nova.scheduler.client.report [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 904.336712] env[68443]: DEBUG oslo_concurrency.lockutils [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.598s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 904.337312] env[68443]: ERROR nova.compute.manager [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 904.337312] env[68443]: Faults: ['InvalidArgument'] [ 904.337312] env[68443]: ERROR nova.compute.manager [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] Traceback (most recent call last): [ 904.337312] env[68443]: ERROR nova.compute.manager [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 904.337312] env[68443]: ERROR 
nova.compute.manager [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] self.driver.spawn(context, instance, image_meta, [ 904.337312] env[68443]: ERROR nova.compute.manager [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 904.337312] env[68443]: ERROR nova.compute.manager [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] self._vmops.spawn(context, instance, image_meta, injected_files, [ 904.337312] env[68443]: ERROR nova.compute.manager [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 904.337312] env[68443]: ERROR nova.compute.manager [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] self._fetch_image_if_missing(context, vi) [ 904.337312] env[68443]: ERROR nova.compute.manager [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 904.337312] env[68443]: ERROR nova.compute.manager [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] image_cache(vi, tmp_image_ds_loc) [ 904.337312] env[68443]: ERROR nova.compute.manager [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 904.337666] env[68443]: ERROR nova.compute.manager [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] vm_util.copy_virtual_disk( [ 904.337666] env[68443]: ERROR nova.compute.manager [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 904.337666] env[68443]: ERROR nova.compute.manager [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] session._wait_for_task(vmdk_copy_task) [ 904.337666] env[68443]: ERROR nova.compute.manager [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 904.337666] env[68443]: ERROR nova.compute.manager [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] return self.wait_for_task(task_ref) [ 904.337666] env[68443]: ERROR nova.compute.manager [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 904.337666] env[68443]: ERROR nova.compute.manager [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] return evt.wait() [ 904.337666] env[68443]: ERROR nova.compute.manager [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 904.337666] env[68443]: ERROR nova.compute.manager [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] result = hub.switch() [ 904.337666] env[68443]: ERROR nova.compute.manager [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 904.337666] env[68443]: ERROR nova.compute.manager [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] return self.greenlet.switch() [ 904.337666] env[68443]: ERROR nova.compute.manager [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 904.337666] env[68443]: ERROR nova.compute.manager [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] self.f(*self.args, **self.kw) [ 904.337979] env[68443]: ERROR nova.compute.manager [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 904.337979] env[68443]: ERROR nova.compute.manager [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] raise exceptions.translate_fault(task_info.error) [ 904.337979] env[68443]: ERROR nova.compute.manager [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 904.337979] env[68443]: ERROR nova.compute.manager [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] Faults: ['InvalidArgument'] [ 904.337979] env[68443]: ERROR nova.compute.manager [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] [ 904.338914] env[68443]: DEBUG nova.compute.utils [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] VimFaultException {{(pid=68443) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 904.340027] env[68443]: DEBUG nova.compute.manager [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] Build of instance 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf was re-scheduled: A specified parameter was not correct: fileType [ 904.340027] env[68443]: Faults: ['InvalidArgument'] {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 904.340483] env[68443]: DEBUG nova.compute.manager [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] Unplugging VIFs for instance {{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 904.340776] env[68443]: DEBUG nova.compute.manager [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 904.340987] env[68443]: DEBUG nova.compute.manager [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 904.341208] env[68443]: DEBUG nova.network.neutron [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 904.665462] env[68443]: DEBUG nova.network.neutron [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 904.678214] env[68443]: INFO nova.compute.manager [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] [instance: 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf] Took 0.34 seconds to deallocate network for instance. [ 904.786060] env[68443]: INFO nova.scheduler.client.report [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Deleted allocations for instance 3c9187ce-bad4-4634-bd67-7a3e7a4cacaf [ 904.804787] env[68443]: DEBUG oslo_concurrency.lockutils [None req-5f3b621c-9211-4886-ae27-c553e858f7fc tempest-ServerDiagnosticsTest-2002637705 tempest-ServerDiagnosticsTest-2002637705-project-member] Lock "3c9187ce-bad4-4634-bd67-7a3e7a4cacaf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 197.299s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 904.826977] env[68443]: DEBUG nova.compute.manager [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Starting instance... 
{{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 904.882047] env[68443]: DEBUG oslo_concurrency.lockutils [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 904.882047] env[68443]: DEBUG oslo_concurrency.lockutils [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 904.882047] env[68443]: INFO nova.compute.claims [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 905.272562] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-905a6513-05b3-43fc-9a20-30e390b7f4d8 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.280512] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f00a2f2e-f3d3-4dff-b30c-745977bfeb9f {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.310375] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0693878c-b09c-4446-a369-dfe978d4bb6b {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.318090] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38cd834e-5ef4-437d-af9f-980871ef3e2f {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.331801] env[68443]: DEBUG nova.compute.provider_tree [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 905.341914] env[68443]: DEBUG nova.scheduler.client.report [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 905.355222] env[68443]: DEBUG oslo_concurrency.lockutils [None 
req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.475s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 905.355718] env[68443]: DEBUG nova.compute.manager [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Start building networks asynchronously for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 905.394172] env[68443]: DEBUG nova.compute.utils [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Using /dev/sd instead of None {{(pid=68443) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 905.397576] env[68443]: DEBUG nova.compute.manager [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Allocating IP information in the background. {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 905.397576] env[68443]: DEBUG nova.network.neutron [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] allocate_for_instance() {{(pid=68443) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 905.407772] env[68443]: DEBUG nova.compute.manager [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Start building block device mappings for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 905.465066] env[68443]: DEBUG nova.policy [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4ad84fada32d419ea6ee7e9d1ab8aa01', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3e79d2610f024d9db339c859900fa651', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68443) authorize /opt/stack/nova/nova/policy.py:203}} [ 905.479658] env[68443]: DEBUG nova.compute.manager [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Start spawning the instance on the hypervisor. 
{{(pid=68443) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 905.507489] env[68443]: DEBUG nova.virt.hardware [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-08T18:48:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-08T18:48:12Z,direct_url=,disk_format='vmdk',id=a80a8b97-4d56-4702-9b02-9d115fcd6710,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f4f2a1e220914ec3b281775c224df247',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-08T18:48:13Z,virtual_size=,visibility=), allow threads: False {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 905.507744] env[68443]: DEBUG nova.virt.hardware [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Flavor limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 905.507905] env[68443]: DEBUG nova.virt.hardware [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Image limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 905.508106] env[68443]: DEBUG nova.virt.hardware [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Flavor pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 905.508413] env[68443]: DEBUG nova.virt.hardware [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Image pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 905.508590] env[68443]: DEBUG nova.virt.hardware [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 905.508803] env[68443]: DEBUG nova.virt.hardware [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 905.508965] env[68443]: DEBUG nova.virt.hardware [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 905.509148] env[68443]: DEBUG nova.virt.hardware [None 
req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Got 1 possible topologies {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 905.509314] env[68443]: DEBUG nova.virt.hardware [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 905.509515] env[68443]: DEBUG nova.virt.hardware [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 905.510426] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d55bb997-3bd3-4ab8-acaa-9659f4c19f07 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.518543] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e374cb9-e7f7-4f3b-921b-79b2269b3ce7 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.767533] env[68443]: DEBUG nova.network.neutron [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Successfully created port: cbafce85-b312-4941-abda-a7a0df3111fc {{(pid=68443) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 906.427862] env[68443]: DEBUG nova.network.neutron [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Successfully updated port: cbafce85-b312-4941-abda-a7a0df3111fc {{(pid=68443) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 906.438942] env[68443]: DEBUG oslo_concurrency.lockutils [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Acquiring lock "refresh_cache-cee290e6-66e3-4d2e-a9bb-f93db33eaaaa" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 906.439110] env[68443]: DEBUG oslo_concurrency.lockutils [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Acquired lock "refresh_cache-cee290e6-66e3-4d2e-a9bb-f93db33eaaaa" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 906.439261] env[68443]: DEBUG nova.network.neutron [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Building network info cache for instance {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 906.483338] env[68443]: DEBUG nova.network.neutron [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 
tempest-ServersAdminTestJSON-1774531573-project-member] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Instance cache missing network info. {{(pid=68443) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 906.657056] env[68443]: DEBUG nova.network.neutron [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Updating instance_info_cache with network_info: [{"id": "cbafce85-b312-4941-abda-a7a0df3111fc", "address": "fa:16:3e:51:6f:17", "network": {"id": "36c6dd6d-77cc-401e-a0f1-e91d2812f43a", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-622924746-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3e79d2610f024d9db339c859900fa651", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4d3f69a-b086-4c3b-b976-5a848b63dfc4", "external-id": "nsx-vlan-transportzone-627", "segmentation_id": 627, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcbafce85-b3", "ovs_interfaceid": "cbafce85-b312-4941-abda-a7a0df3111fc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 906.667411] env[68443]: DEBUG oslo_concurrency.lockutils [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Releasing lock "refresh_cache-cee290e6-66e3-4d2e-a9bb-f93db33eaaaa" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 906.667778] env[68443]: DEBUG nova.compute.manager [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Instance network_info: |[{"id": "cbafce85-b312-4941-abda-a7a0df3111fc", "address": "fa:16:3e:51:6f:17", "network": {"id": "36c6dd6d-77cc-401e-a0f1-e91d2812f43a", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-622924746-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3e79d2610f024d9db339c859900fa651", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4d3f69a-b086-4c3b-b976-5a848b63dfc4", "external-id": "nsx-vlan-transportzone-627", "segmentation_id": 627, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcbafce85-b3", "ovs_interfaceid": "cbafce85-b312-4941-abda-a7a0df3111fc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 906.668126] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:51:6f:17', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c4d3f69a-b086-4c3b-b976-5a848b63dfc4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cbafce85-b312-4941-abda-a7a0df3111fc', 'vif_model': 'vmxnet3'}] {{(pid=68443) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 906.675962] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Creating folder: Project (3e79d2610f024d9db339c859900fa651). Parent ref: group-v673136. {{(pid=68443) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 906.676537] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b1ea6be0-bdef-4270-a026-c6630b0ae62c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.688277] env[68443]: INFO nova.virt.vmwareapi.vm_util [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Created folder: Project (3e79d2610f024d9db339c859900fa651) in parent group-v673136. [ 906.688591] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Creating folder: Instances. Parent ref: group-v673176. {{(pid=68443) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 906.688727] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ac1b98c2-b19d-4c17-b3b9-130924bf7e81 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.697712] env[68443]: INFO nova.virt.vmwareapi.vm_util [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Created folder: Instances in parent group-v673176. [ 906.697935] env[68443]: DEBUG oslo.service.loopingcall [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 906.698133] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Creating VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 906.698328] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-96b4ff86-c3c0-4fa0-ae8a-3e7a2193a98d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.718564] env[68443]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 906.718564] env[68443]: value = "task-3373928" [ 906.718564] env[68443]: _type = "Task" [ 906.718564] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.725791] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3373928, 'name': CreateVM_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.002220] env[68443]: DEBUG nova.compute.manager [req-86a96116-aa4d-413b-b88c-12569dc2509a req-6de9dd9a-447a-4823-86c9-3f5536db128a service nova] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Received event network-vif-plugged-cbafce85-b312-4941-abda-a7a0df3111fc {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 907.002768] env[68443]: DEBUG oslo_concurrency.lockutils [req-86a96116-aa4d-413b-b88c-12569dc2509a req-6de9dd9a-447a-4823-86c9-3f5536db128a service nova] Acquiring lock "cee290e6-66e3-4d2e-a9bb-f93db33eaaaa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 907.002768] env[68443]: DEBUG oslo_concurrency.lockutils [req-86a96116-aa4d-413b-b88c-12569dc2509a req-6de9dd9a-447a-4823-86c9-3f5536db128a service nova] Lock "cee290e6-66e3-4d2e-a9bb-f93db33eaaaa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 907.002768] env[68443]: DEBUG oslo_concurrency.lockutils [req-86a96116-aa4d-413b-b88c-12569dc2509a req-6de9dd9a-447a-4823-86c9-3f5536db128a service nova] Lock "cee290e6-66e3-4d2e-a9bb-f93db33eaaaa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 907.003028] env[68443]: DEBUG nova.compute.manager [req-86a96116-aa4d-413b-b88c-12569dc2509a req-6de9dd9a-447a-4823-86c9-3f5536db128a service nova] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] No waiting events found dispatching network-vif-plugged-cbafce85-b312-4941-abda-a7a0df3111fc {{(pid=68443) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 907.003174] env[68443]: WARNING nova.compute.manager [req-86a96116-aa4d-413b-b88c-12569dc2509a req-6de9dd9a-447a-4823-86c9-3f5536db128a service nova] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Received unexpected event network-vif-plugged-cbafce85-b312-4941-abda-a7a0df3111fc for instance with vm_state building and task_state spawning. 
[ 907.003275] env[68443]: DEBUG nova.compute.manager [req-86a96116-aa4d-413b-b88c-12569dc2509a req-6de9dd9a-447a-4823-86c9-3f5536db128a service nova] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Received event network-changed-cbafce85-b312-4941-abda-a7a0df3111fc {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 907.003414] env[68443]: DEBUG nova.compute.manager [req-86a96116-aa4d-413b-b88c-12569dc2509a req-6de9dd9a-447a-4823-86c9-3f5536db128a service nova] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Refreshing instance network info cache due to event network-changed-cbafce85-b312-4941-abda-a7a0df3111fc. {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 907.003601] env[68443]: DEBUG oslo_concurrency.lockutils [req-86a96116-aa4d-413b-b88c-12569dc2509a req-6de9dd9a-447a-4823-86c9-3f5536db128a service nova] Acquiring lock "refresh_cache-cee290e6-66e3-4d2e-a9bb-f93db33eaaaa" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 907.003736] env[68443]: DEBUG oslo_concurrency.lockutils [req-86a96116-aa4d-413b-b88c-12569dc2509a req-6de9dd9a-447a-4823-86c9-3f5536db128a service nova] Acquired lock "refresh_cache-cee290e6-66e3-4d2e-a9bb-f93db33eaaaa" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 907.003892] env[68443]: DEBUG nova.network.neutron [req-86a96116-aa4d-413b-b88c-12569dc2509a req-6de9dd9a-447a-4823-86c9-3f5536db128a service nova] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Refreshing network info cache for port cbafce85-b312-4941-abda-a7a0df3111fc {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 907.230501] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3373928, 'name': CreateVM_Task, 'duration_secs': 0.335704} completed successfully. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.230680] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Created VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 907.231504] env[68443]: DEBUG oslo_concurrency.lockutils [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 907.231747] env[68443]: DEBUG oslo_concurrency.lockutils [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 907.232652] env[68443]: DEBUG oslo_concurrency.lockutils [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 907.232652] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5aca6378-3057-42ba-8739-dac568c39e12 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.237148] env[68443]: DEBUG oslo_vmware.api [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Waiting for the task: (returnval){ [ 907.237148] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]52df96e9-b944-d0e3-1025-f9c02897620f" [ 907.237148] env[68443]: _type = "Task" [ 907.237148] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.245896] env[68443]: DEBUG oslo_vmware.api [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]52df96e9-b944-d0e3-1025-f9c02897620f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.283553] env[68443]: DEBUG nova.network.neutron [req-86a96116-aa4d-413b-b88c-12569dc2509a req-6de9dd9a-447a-4823-86c9-3f5536db128a service nova] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Updated VIF entry in instance network info cache for port cbafce85-b312-4941-abda-a7a0df3111fc. 
{{(pid=68443) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 907.283919] env[68443]: DEBUG nova.network.neutron [req-86a96116-aa4d-413b-b88c-12569dc2509a req-6de9dd9a-447a-4823-86c9-3f5536db128a service nova] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Updating instance_info_cache with network_info: [{"id": "cbafce85-b312-4941-abda-a7a0df3111fc", "address": "fa:16:3e:51:6f:17", "network": {"id": "36c6dd6d-77cc-401e-a0f1-e91d2812f43a", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-622924746-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3e79d2610f024d9db339c859900fa651", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4d3f69a-b086-4c3b-b976-5a848b63dfc4", "external-id": "nsx-vlan-transportzone-627", "segmentation_id": 627, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcbafce85-b3", "ovs_interfaceid": "cbafce85-b312-4941-abda-a7a0df3111fc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 907.296661] env[68443]: DEBUG oslo_concurrency.lockutils [req-86a96116-aa4d-413b-b88c-12569dc2509a req-6de9dd9a-447a-4823-86c9-3f5536db128a service nova] Releasing lock "refresh_cache-cee290e6-66e3-4d2e-a9bb-f93db33eaaaa" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 907.748911] env[68443]: DEBUG oslo_concurrency.lockutils [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 907.749228] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Processing image a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 907.749270] env[68443]: DEBUG oslo_concurrency.lockutils [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 910.583333] env[68443]: DEBUG oslo_concurrency.lockutils [None req-d1cb6758-46cc-42d0-8cf9-3c263aaf30c6 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Acquiring lock "b0882dec-0d2a-4f62-933d-0d24f3340026" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68443) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 911.978416] env[68443]: DEBUG oslo_concurrency.lockutils [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Acquiring lock "3842d98e-d971-456c-b287-53c513285acf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 911.979098] env[68443]: DEBUG oslo_concurrency.lockutils [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Lock "3842d98e-d971-456c-b287-53c513285acf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 914.857341] env[68443]: DEBUG oslo_concurrency.lockutils [None req-04c5b72b-0f5b-439b-bc52-3b3953721073 tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Acquiring lock "5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 923.719517] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4d56033b-9784-4da4-b993-5454555a5ea0 tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Acquiring lock "f5aa2b1b-c290-42f2-84d3-272415184f14" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 924.497127] env[68443]: DEBUG oslo_concurrency.lockutils [None req-3ab74210-3b2e-4188-a510-0ae9a46c0da5 tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Acquiring lock "280e1cc5-91db-4a03-bca4-b2d2e4ddd221" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 931.462995] env[68443]: DEBUG oslo_concurrency.lockutils [None req-15fb12b1-fb94-4529-a2b6-e8b0eb7ef233 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Acquiring lock "8fdbd88f-f608-4f26-9076-7d2f6eb67224" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 934.015807] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a560b179-4e1b-4505-b452-d8dac37f09f9 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Acquiring lock "08a980e1-ca8e-4af3-afbf-bd688e11259f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 934.432346] env[68443]: DEBUG oslo_concurrency.lockutils [None req-5caaa3d4-4402-4c4c-a4ef-ad56d6da927e tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Acquiring lock 
"63801b63-1601-4e77-a500-3569713177bd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 939.563637] env[68443]: DEBUG oslo_concurrency.lockutils [None req-f746f47f-753f-4998-977c-250495548b46 tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Acquiring lock "844f2b9d-ad2a-431a-a587-65ba446d571f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 939.954879] env[68443]: DEBUG oslo_concurrency.lockutils [None req-6d0af9fa-c961-49ce-be33-ee35fae4cede tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Acquiring lock "6df57929-1115-4080-8131-8960525eb833" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 945.289488] env[68443]: DEBUG oslo_concurrency.lockutils [None req-209e370b-5bab-4e48-8f17-0644f622ce28 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Acquiring lock "cee290e6-66e3-4d2e-a9bb-f93db33eaaaa" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 945.343215] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Acquiring lock "6e162408-6d3d-42e0-8992-f5843e9e7855" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 945.343873] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Lock "6e162408-6d3d-42e0-8992-f5843e9e7855" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 949.462422] env[68443]: DEBUG oslo_concurrency.lockutils [None req-8f361853-4d2d-4481-bec4-8155163e22e0 tempest-MultipleCreateTestJSON-1060821449 tempest-MultipleCreateTestJSON-1060821449-project-member] Acquiring lock "767f1f71-6b02-4b88-83bf-dcbf9d87b895" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 949.462715] env[68443]: DEBUG oslo_concurrency.lockutils [None req-8f361853-4d2d-4481-bec4-8155163e22e0 tempest-MultipleCreateTestJSON-1060821449 tempest-MultipleCreateTestJSON-1060821449-project-member] Lock "767f1f71-6b02-4b88-83bf-dcbf9d87b895" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 949.498227] env[68443]: DEBUG 
oslo_concurrency.lockutils [None req-8f361853-4d2d-4481-bec4-8155163e22e0 tempest-MultipleCreateTestJSON-1060821449 tempest-MultipleCreateTestJSON-1060821449-project-member] Acquiring lock "95bd5db1-4fff-45a2-84a0-6dd35c8463a2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 949.498786] env[68443]: DEBUG oslo_concurrency.lockutils [None req-8f361853-4d2d-4481-bec4-8155163e22e0 tempest-MultipleCreateTestJSON-1060821449 tempest-MultipleCreateTestJSON-1060821449-project-member] Lock "95bd5db1-4fff-45a2-84a0-6dd35c8463a2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 952.217021] env[68443]: WARNING oslo_vmware.rw_handles [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 952.217021] env[68443]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 952.217021] env[68443]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 952.217021] env[68443]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 952.217021] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 952.217021] env[68443]: ERROR oslo_vmware.rw_handles response.begin() [ 952.217021] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 952.217021] env[68443]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 952.217021] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 952.217021] env[68443]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 952.217021] env[68443]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 952.217021] env[68443]: ERROR oslo_vmware.rw_handles [ 952.217021] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Downloaded image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to vmware_temp/bd23e8f5-b880-4619-bedf-3e86911a3f61/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 952.217997] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Caching image {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 952.218735] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Copying Virtual Disk [datastore1] 
vmware_temp/bd23e8f5-b880-4619-bedf-3e86911a3f61/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk to [datastore1] vmware_temp/bd23e8f5-b880-4619-bedf-3e86911a3f61/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk {{(pid=68443) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 952.219179] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-74647451-e416-473c-9ade-aa0d5cc8a725 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.232396] env[68443]: DEBUG oslo_vmware.api [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Waiting for the task: (returnval){ [ 952.232396] env[68443]: value = "task-3373929" [ 952.232396] env[68443]: _type = "Task" [ 952.232396] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.245401] env[68443]: DEBUG oslo_vmware.api [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Task: {'id': task-3373929, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.367514] env[68443]: DEBUG oslo_concurrency.lockutils [None req-76a4bab5-8f17-482f-a227-c2ffcbed392f tempest-ServersTestMultiNic-1218264561 tempest-ServersTestMultiNic-1218264561-project-member] Acquiring lock "ff1a64cd-c34a-4907-a40d-ddfbb28dbc7a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 952.368288] env[68443]: DEBUG oslo_concurrency.lockutils [None req-76a4bab5-8f17-482f-a227-c2ffcbed392f tempest-ServersTestMultiNic-1218264561 tempest-ServersTestMultiNic-1218264561-project-member] Lock "ff1a64cd-c34a-4907-a40d-ddfbb28dbc7a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 952.745686] env[68443]: DEBUG oslo_vmware.exceptions [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Fault InvalidArgument not matched. 
{{(pid=68443) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 952.746395] env[68443]: DEBUG oslo_concurrency.lockutils [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 952.748592] env[68443]: ERROR nova.compute.manager [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 952.748592] env[68443]: Faults: ['InvalidArgument'] [ 952.748592] env[68443]: ERROR nova.compute.manager [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Traceback (most recent call last): [ 952.748592] env[68443]: ERROR nova.compute.manager [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 952.748592] env[68443]: ERROR nova.compute.manager [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] yield resources [ 952.748592] env[68443]: ERROR nova.compute.manager [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 952.748592] env[68443]: ERROR nova.compute.manager [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] self.driver.spawn(context, instance, image_meta, [ 952.748592] env[68443]: ERROR nova.compute.manager [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 952.748592] env[68443]: ERROR nova.compute.manager [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] self._vmops.spawn(context, instance, image_meta, injected_files, [ 952.748592] env[68443]: ERROR nova.compute.manager [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 952.748592] env[68443]: ERROR nova.compute.manager [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] self._fetch_image_if_missing(context, vi) [ 952.748592] env[68443]: ERROR nova.compute.manager [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 952.749026] env[68443]: ERROR nova.compute.manager [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] image_cache(vi, tmp_image_ds_loc) [ 952.749026] env[68443]: ERROR nova.compute.manager [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 952.749026] env[68443]: ERROR nova.compute.manager [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] vm_util.copy_virtual_disk( [ 952.749026] env[68443]: ERROR nova.compute.manager [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 952.749026] env[68443]: ERROR nova.compute.manager [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] session._wait_for_task(vmdk_copy_task) [ 952.749026] env[68443]: ERROR nova.compute.manager [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 
157, in _wait_for_task [ 952.749026] env[68443]: ERROR nova.compute.manager [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] return self.wait_for_task(task_ref) [ 952.749026] env[68443]: ERROR nova.compute.manager [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 952.749026] env[68443]: ERROR nova.compute.manager [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] return evt.wait() [ 952.749026] env[68443]: ERROR nova.compute.manager [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 952.749026] env[68443]: ERROR nova.compute.manager [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] result = hub.switch() [ 952.749026] env[68443]: ERROR nova.compute.manager [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 952.749026] env[68443]: ERROR nova.compute.manager [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] return self.greenlet.switch() [ 952.749404] env[68443]: ERROR nova.compute.manager [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 952.749404] env[68443]: ERROR nova.compute.manager [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] self.f(*self.args, **self.kw) [ 952.749404] env[68443]: ERROR nova.compute.manager [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 952.749404] env[68443]: ERROR nova.compute.manager [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] raise exceptions.translate_fault(task_info.error) [ 952.749404] env[68443]: ERROR nova.compute.manager [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 952.749404] env[68443]: ERROR nova.compute.manager [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Faults: ['InvalidArgument'] [ 952.749404] env[68443]: ERROR nova.compute.manager [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] [ 952.749404] env[68443]: INFO nova.compute.manager [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Terminating instance [ 952.753114] env[68443]: DEBUG oslo_concurrency.lockutils [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 952.753114] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 952.753114] env[68443]: DEBUG nova.compute.manager [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] 
[instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 952.753114] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 952.753114] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-09794a41-0f77-417e-a025-90265fc69ddd {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.754906] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67f7ae32-879e-46ea-8e5f-5d79f504f02c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.763372] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Unregistering the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 952.763372] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-40aa4c30-d944-40b1-b104-b56cbae87c27 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.765689] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 952.765991] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68443) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 952.767061] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7873f50a-7996-44be-a0d8-ab779d1ede8e {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.773463] env[68443]: DEBUG oslo_vmware.api [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Waiting for the task: (returnval){ [ 952.773463] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]52284650-591f-e087-1ba7-65daf98f064b" [ 952.773463] env[68443]: _type = "Task" [ 952.773463] env[68443]: } to complete. 
{{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.780600] env[68443]: DEBUG oslo_vmware.api [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]52284650-591f-e087-1ba7-65daf98f064b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.843230] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Unregistered the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 952.843504] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Deleting contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 952.843709] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Deleting the datastore file [datastore1] b0882dec-0d2a-4f62-933d-0d24f3340026 {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 952.843984] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0ec8c47a-7926-40a4-95be-1e690563f1c3 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.855431] env[68443]: DEBUG oslo_vmware.api [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Waiting for the task: (returnval){ [ 952.855431] env[68443]: value = "task-3373931" [ 952.855431] env[68443]: _type = "Task" [ 952.855431] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.864827] env[68443]: DEBUG oslo_vmware.api [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Task: {'id': task-3373931, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.285173] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Preparing fetch location {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 953.288017] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Creating directory with path [datastore1] vmware_temp/c4296a8e-ce84-4371-990e-7e714764cadf/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 953.288017] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-27b8eda1-acdd-4c82-bc70-340dcca58a65 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.299823] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Created directory with path [datastore1] vmware_temp/c4296a8e-ce84-4371-990e-7e714764cadf/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 953.300103] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Fetch image to [datastore1] vmware_temp/c4296a8e-ce84-4371-990e-7e714764cadf/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 953.300321] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to [datastore1] vmware_temp/c4296a8e-ce84-4371-990e-7e714764cadf/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 953.301172] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ec6dca5-e1ca-4bcd-bf5d-ccac41f071f6 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.308854] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fc26572-94cc-4755-ac5e-f41438899288 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.320467] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f5085cf-804c-4f2f-b2e4-2efbfa92c8e1 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.359972] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-dbbb9e0e-e5d4-47a4-b8a9-63403d12dc43 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.367805] env[68443]: DEBUG oslo_vmware.api [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Task: {'id': task-3373931, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.071212} completed successfully. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.369295] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Deleted the datastore file {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 953.369533] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Deleted contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 953.369750] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 953.369972] env[68443]: INFO nova.compute.manager [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Took 0.62 seconds to destroy the instance on the hypervisor. 
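
Editor's note: the CopyVirtualDisk_Task failure and teardown recorded above follow the usual oslo.vmware pattern of issuing a vCenter task through the API session, polling it with wait_for_task, and having a task error surface as a VimFaultException. Below is a minimal sketch of that pattern, not Nova's actual vm_util code; the session object, datacenter reference and datastore paths are placeholders.

    from oslo_vmware import exceptions as vexc

    def copy_sparse_vmdk(session, dc_ref, src_path, dst_path):
        # Ask the VirtualDiskManager to copy the temporary sparse VMDK to its
        # cached name, the kind of call logged above as
        # "Invoking VirtualDiskManager.CopyVirtualDisk_Task".
        vdm = session.vim.service_content.virtualDiskManager
        task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', vdm,
                                  sourceName=src_path,
                                  sourceDatacenter=dc_ref,
                                  destName=dst_path)
        try:
            # wait_for_task polls the task (the "progress is 0%" records) and
            # raises a translated fault if vCenter reports an error.
            session.wait_for_task(task)
        except vexc.VimFaultException:
            # This is the branch taken in the log: the task fails with
            # "A specified parameter was not correct: fileType",
            # faults == ['InvalidArgument'], and the spawn is aborted,
            # after which the instance is destroyed and rescheduled.
            raise
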
[ 953.372343] env[68443]: DEBUG nova.compute.claims [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Aborting claim: {{(pid=68443) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 953.372572] env[68443]: DEBUG oslo_concurrency.lockutils [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 953.372827] env[68443]: DEBUG oslo_concurrency.lockutils [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 953.375676] env[68443]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-0ed11756-1aea-4aad-b44c-92ac47835ab8 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.417125] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 953.619519] env[68443]: DEBUG oslo_vmware.rw_handles [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c4296a8e-ce84-4371-990e-7e714764cadf/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68443) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 953.685691] env[68443]: DEBUG oslo_vmware.rw_handles [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Completed reading data from the image iterator. {{(pid=68443) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 953.685691] env[68443]: DEBUG oslo_vmware.rw_handles [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c4296a8e-ce84-4371-990e-7e714764cadf/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68443) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 953.917737] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16d0d5e7-cdb8-4603-9e6d-8a82cc85f273 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.927029] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72af78f0-4b8b-47a7-98b8-3a849e51d910 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.967882] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d486f18-bfba-4272-9901-19ad4fffc21f {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.979681] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04a84720-0756-411f-bbf6-cdcf5f3e56cd {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.995091] env[68443]: DEBUG nova.compute.provider_tree [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 954.005029] env[68443]: DEBUG nova.scheduler.client.report [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 954.023625] env[68443]: DEBUG oslo_concurrency.lockutils [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.651s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 954.024244] env[68443]: ERROR nova.compute.manager [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 954.024244] env[68443]: Faults: ['InvalidArgument'] [ 954.024244] env[68443]: ERROR nova.compute.manager [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Traceback (most recent call last): [ 954.024244] env[68443]: ERROR nova.compute.manager [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 954.024244] env[68443]: ERROR 
nova.compute.manager [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] self.driver.spawn(context, instance, image_meta, [ 954.024244] env[68443]: ERROR nova.compute.manager [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 954.024244] env[68443]: ERROR nova.compute.manager [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] self._vmops.spawn(context, instance, image_meta, injected_files, [ 954.024244] env[68443]: ERROR nova.compute.manager [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 954.024244] env[68443]: ERROR nova.compute.manager [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] self._fetch_image_if_missing(context, vi) [ 954.024244] env[68443]: ERROR nova.compute.manager [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 954.024244] env[68443]: ERROR nova.compute.manager [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] image_cache(vi, tmp_image_ds_loc) [ 954.024244] env[68443]: ERROR nova.compute.manager [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 954.024584] env[68443]: ERROR nova.compute.manager [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] vm_util.copy_virtual_disk( [ 954.024584] env[68443]: ERROR nova.compute.manager [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 954.024584] env[68443]: ERROR nova.compute.manager [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] session._wait_for_task(vmdk_copy_task) [ 954.024584] env[68443]: ERROR nova.compute.manager [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 954.024584] env[68443]: ERROR nova.compute.manager [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] return self.wait_for_task(task_ref) [ 954.024584] env[68443]: ERROR nova.compute.manager [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 954.024584] env[68443]: ERROR nova.compute.manager [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] return evt.wait() [ 954.024584] env[68443]: ERROR nova.compute.manager [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 954.024584] env[68443]: ERROR nova.compute.manager [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] result = hub.switch() [ 954.024584] env[68443]: ERROR nova.compute.manager [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 954.024584] env[68443]: ERROR nova.compute.manager [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] return self.greenlet.switch() [ 954.024584] env[68443]: ERROR nova.compute.manager [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 954.024584] env[68443]: ERROR nova.compute.manager [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] self.f(*self.args, **self.kw) [ 954.024889] env[68443]: ERROR nova.compute.manager [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 954.024889] env[68443]: ERROR nova.compute.manager [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] raise exceptions.translate_fault(task_info.error) [ 954.024889] env[68443]: ERROR nova.compute.manager [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 954.024889] env[68443]: ERROR nova.compute.manager [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Faults: ['InvalidArgument'] [ 954.024889] env[68443]: ERROR nova.compute.manager [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] [ 954.025017] env[68443]: DEBUG nova.compute.utils [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] VimFaultException {{(pid=68443) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 954.028880] env[68443]: DEBUG nova.compute.manager [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Build of instance b0882dec-0d2a-4f62-933d-0d24f3340026 was re-scheduled: A specified parameter was not correct: fileType [ 954.028880] env[68443]: Faults: ['InvalidArgument'] {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 954.029326] env[68443]: DEBUG nova.compute.manager [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Unplugging VIFs for instance {{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 954.029513] env[68443]: DEBUG nova.compute.manager [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 954.030648] env[68443]: DEBUG nova.compute.manager [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 954.030648] env[68443]: DEBUG nova.network.neutron [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 955.117284] env[68443]: DEBUG nova.network.neutron [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 955.129987] env[68443]: INFO nova.compute.manager [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Took 1.10 seconds to deallocate network for instance. [ 955.246468] env[68443]: INFO nova.scheduler.client.report [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Deleted allocations for instance b0882dec-0d2a-4f62-933d-0d24f3340026 [ 955.268650] env[68443]: DEBUG oslo_concurrency.lockutils [None req-2e73bcfe-47fe-433d-a55f-52a0bf095590 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Lock "b0882dec-0d2a-4f62-933d-0d24f3340026" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 243.111s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.269944] env[68443]: DEBUG oslo_concurrency.lockutils [None req-d1cb6758-46cc-42d0-8cf9-3c263aaf30c6 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Lock "b0882dec-0d2a-4f62-933d-0d24f3340026" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 44.687s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 955.270221] env[68443]: DEBUG oslo_concurrency.lockutils [None req-d1cb6758-46cc-42d0-8cf9-3c263aaf30c6 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Acquiring lock "b0882dec-0d2a-4f62-933d-0d24f3340026-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 955.270475] env[68443]: DEBUG oslo_concurrency.lockutils [None req-d1cb6758-46cc-42d0-8cf9-3c263aaf30c6 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Lock "b0882dec-0d2a-4f62-933d-0d24f3340026-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 955.270663] env[68443]: DEBUG oslo_concurrency.lockutils [None req-d1cb6758-46cc-42d0-8cf9-3c263aaf30c6 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Lock "b0882dec-0d2a-4f62-933d-0d24f3340026-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.273259] env[68443]: INFO nova.compute.manager [None req-d1cb6758-46cc-42d0-8cf9-3c263aaf30c6 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Terminating instance [ 955.275044] env[68443]: DEBUG nova.compute.manager [None req-d1cb6758-46cc-42d0-8cf9-3c263aaf30c6 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 955.275308] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-d1cb6758-46cc-42d0-8cf9-3c263aaf30c6 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 955.275888] env[68443]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-29db9ae8-cd8d-4224-8119-a8adaf8a88b6 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.290179] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dae0f17-c601-468a-9ba4-bfe75cc1b348 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.304777] env[68443]: DEBUG nova.compute.manager [None req-f0431b1a-8bf3-44e7-b9d5-92389cd9d936 tempest-ServersAdmin275Test-1137769342 tempest-ServersAdmin275Test-1137769342-project-member] [instance: 88c5636b-e0d5-4db2-8044-aa909a1da0cc] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 955.330495] env[68443]: WARNING nova.virt.vmwareapi.vmops [None req-d1cb6758-46cc-42d0-8cf9-3c263aaf30c6 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b0882dec-0d2a-4f62-933d-0d24f3340026 could not be found. 
[ 955.330858] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-d1cb6758-46cc-42d0-8cf9-3c263aaf30c6 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 955.331134] env[68443]: INFO nova.compute.manager [None req-d1cb6758-46cc-42d0-8cf9-3c263aaf30c6 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Took 0.06 seconds to destroy the instance on the hypervisor. [ 955.331733] env[68443]: DEBUG oslo.service.loopingcall [None req-d1cb6758-46cc-42d0-8cf9-3c263aaf30c6 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 955.332094] env[68443]: DEBUG nova.compute.manager [-] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 955.332199] env[68443]: DEBUG nova.network.neutron [-] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 955.335614] env[68443]: DEBUG nova.compute.manager [None req-f0431b1a-8bf3-44e7-b9d5-92389cd9d936 tempest-ServersAdmin275Test-1137769342 tempest-ServersAdmin275Test-1137769342-project-member] [instance: 88c5636b-e0d5-4db2-8044-aa909a1da0cc] Instance disappeared before build. {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 955.368374] env[68443]: DEBUG nova.network.neutron [-] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 955.375633] env[68443]: DEBUG oslo_concurrency.lockutils [None req-f0431b1a-8bf3-44e7-b9d5-92389cd9d936 tempest-ServersAdmin275Test-1137769342 tempest-ServersAdmin275Test-1137769342-project-member] Lock "88c5636b-e0d5-4db2-8044-aa909a1da0cc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 210.758s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.376694] env[68443]: INFO nova.compute.manager [-] [instance: b0882dec-0d2a-4f62-933d-0d24f3340026] Took 0.04 seconds to deallocate network for instance. [ 955.387103] env[68443]: DEBUG nova.compute.manager [None req-ae974ebd-ba16-4632-8c73-899fb8857118 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: ec17824f-18a5-4a44-8f64-33438ee4990d] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 955.411548] env[68443]: DEBUG nova.compute.manager [None req-ae974ebd-ba16-4632-8c73-899fb8857118 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: ec17824f-18a5-4a44-8f64-33438ee4990d] Instance disappeared before build. 
{{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 955.451928] env[68443]: DEBUG oslo_concurrency.lockutils [None req-ae974ebd-ba16-4632-8c73-899fb8857118 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Lock "ec17824f-18a5-4a44-8f64-33438ee4990d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 210.427s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.467784] env[68443]: DEBUG nova.compute.manager [None req-e93e5177-c6eb-431a-88fe-f2e582fdfe11 tempest-ServerMetadataNegativeTestJSON-393042034 tempest-ServerMetadataNegativeTestJSON-393042034-project-member] [instance: 12d34fba-743e-4f1c-aeaf-6914aa5788c1] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 955.517630] env[68443]: DEBUG nova.compute.manager [None req-e93e5177-c6eb-431a-88fe-f2e582fdfe11 tempest-ServerMetadataNegativeTestJSON-393042034 tempest-ServerMetadataNegativeTestJSON-393042034-project-member] [instance: 12d34fba-743e-4f1c-aeaf-6914aa5788c1] Instance disappeared before build. {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 955.542854] env[68443]: DEBUG oslo_concurrency.lockutils [None req-d1cb6758-46cc-42d0-8cf9-3c263aaf30c6 tempest-ServerExternalEventsTest-229631243 tempest-ServerExternalEventsTest-229631243-project-member] Lock "b0882dec-0d2a-4f62-933d-0d24f3340026" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.273s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.559369] env[68443]: DEBUG oslo_concurrency.lockutils [None req-e93e5177-c6eb-431a-88fe-f2e582fdfe11 tempest-ServerMetadataNegativeTestJSON-393042034 tempest-ServerMetadataNegativeTestJSON-393042034-project-member] Lock "12d34fba-743e-4f1c-aeaf-6914aa5788c1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 209.689s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.569660] env[68443]: DEBUG nova.compute.manager [None req-c4345f29-09d5-4e24-b33c-325e40bb6175 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] [instance: 5afa4386-7640-4a3d-bc2b-b8b5e9d28ebf] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 955.620149] env[68443]: DEBUG nova.compute.manager [None req-c4345f29-09d5-4e24-b33c-325e40bb6175 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] [instance: 5afa4386-7640-4a3d-bc2b-b8b5e9d28ebf] Instance disappeared before build. 
{{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 955.650145] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c4345f29-09d5-4e24-b33c-325e40bb6175 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Lock "5afa4386-7640-4a3d-bc2b-b8b5e9d28ebf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 209.206s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.663659] env[68443]: DEBUG nova.compute.manager [None req-a554a796-6fc0-43dc-b238-cf3300278695 tempest-MultipleCreateTestJSON-1060821449 tempest-MultipleCreateTestJSON-1060821449-project-member] [instance: 410678da-9177-4822-9d48-a94eeefcd22f] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 955.698974] env[68443]: DEBUG nova.compute.manager [None req-a554a796-6fc0-43dc-b238-cf3300278695 tempest-MultipleCreateTestJSON-1060821449 tempest-MultipleCreateTestJSON-1060821449-project-member] [instance: 410678da-9177-4822-9d48-a94eeefcd22f] Instance disappeared before build. {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 955.732090] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a554a796-6fc0-43dc-b238-cf3300278695 tempest-MultipleCreateTestJSON-1060821449 tempest-MultipleCreateTestJSON-1060821449-project-member] Lock "410678da-9177-4822-9d48-a94eeefcd22f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 208.683s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.753180] env[68443]: DEBUG nova.compute.manager [None req-a554a796-6fc0-43dc-b238-cf3300278695 tempest-MultipleCreateTestJSON-1060821449 tempest-MultipleCreateTestJSON-1060821449-project-member] [instance: 1c1675e9-0e4d-49d8-bb02-517b6021c35d] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 955.791303] env[68443]: DEBUG nova.compute.manager [None req-a554a796-6fc0-43dc-b238-cf3300278695 tempest-MultipleCreateTestJSON-1060821449 tempest-MultipleCreateTestJSON-1060821449-project-member] [instance: 1c1675e9-0e4d-49d8-bb02-517b6021c35d] Instance disappeared before build. {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 955.820511] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a554a796-6fc0-43dc-b238-cf3300278695 tempest-MultipleCreateTestJSON-1060821449 tempest-MultipleCreateTestJSON-1060821449-project-member] Lock "1c1675e9-0e4d-49d8-bb02-517b6021c35d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 208.713s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.834519] env[68443]: DEBUG nova.compute.manager [None req-5e5c305d-3071-4b15-bb94-41cb6b4b28ec tempest-SecurityGroupsTestJSON-1267966769 tempest-SecurityGroupsTestJSON-1267966769-project-member] [instance: 32a3402f-546a-4e3d-b71d-e4e3b50df6f0] Starting instance... 
{{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 955.873020] env[68443]: DEBUG nova.compute.manager [None req-5e5c305d-3071-4b15-bb94-41cb6b4b28ec tempest-SecurityGroupsTestJSON-1267966769 tempest-SecurityGroupsTestJSON-1267966769-project-member] [instance: 32a3402f-546a-4e3d-b71d-e4e3b50df6f0] Instance disappeared before build. {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 955.907180] env[68443]: DEBUG oslo_concurrency.lockutils [None req-5e5c305d-3071-4b15-bb94-41cb6b4b28ec tempest-SecurityGroupsTestJSON-1267966769 tempest-SecurityGroupsTestJSON-1267966769-project-member] Lock "32a3402f-546a-4e3d-b71d-e4e3b50df6f0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 208.369s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.921936] env[68443]: DEBUG nova.compute.manager [None req-10c9e7ab-65e9-40b7-bba9-eed74def74f5 tempest-ServersTestMultiNic-1218264561 tempest-ServersTestMultiNic-1218264561-project-member] [instance: 96d4015d-e7a6-4fcc-8f73-afc928113cff] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 955.950083] env[68443]: DEBUG nova.compute.manager [None req-10c9e7ab-65e9-40b7-bba9-eed74def74f5 tempest-ServersTestMultiNic-1218264561 tempest-ServersTestMultiNic-1218264561-project-member] [instance: 96d4015d-e7a6-4fcc-8f73-afc928113cff] Instance disappeared before build. {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 955.977598] env[68443]: DEBUG oslo_concurrency.lockutils [None req-10c9e7ab-65e9-40b7-bba9-eed74def74f5 tempest-ServersTestMultiNic-1218264561 tempest-ServersTestMultiNic-1218264561-project-member] Lock "96d4015d-e7a6-4fcc-8f73-afc928113cff" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 206.083s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.994441] env[68443]: DEBUG nova.compute.manager [None req-08515451-e63c-49f4-8f2b-9b96cb759d13 tempest-ServerActionsTestJSON-110831187 tempest-ServerActionsTestJSON-110831187-project-member] [instance: b8f1485f-2f6a-416f-a285-07607283eb08] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 956.031028] env[68443]: DEBUG nova.compute.manager [None req-08515451-e63c-49f4-8f2b-9b96cb759d13 tempest-ServerActionsTestJSON-110831187 tempest-ServerActionsTestJSON-110831187-project-member] [instance: b8f1485f-2f6a-416f-a285-07607283eb08] Instance disappeared before build. 
{{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 956.069963] env[68443]: DEBUG oslo_concurrency.lockutils [None req-08515451-e63c-49f4-8f2b-9b96cb759d13 tempest-ServerActionsTestJSON-110831187 tempest-ServerActionsTestJSON-110831187-project-member] Lock "b8f1485f-2f6a-416f-a285-07607283eb08" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 205.020s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 956.086970] env[68443]: DEBUG nova.compute.manager [None req-300922a4-47ad-4696-9f3d-8947dface6dc tempest-ServersTestManualDisk-339913820 tempest-ServersTestManualDisk-339913820-project-member] [instance: b473237b-274d-450d-b5a1-63d78b08f6f0] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 956.124320] env[68443]: DEBUG nova.compute.manager [None req-300922a4-47ad-4696-9f3d-8947dface6dc tempest-ServersTestManualDisk-339913820 tempest-ServersTestManualDisk-339913820-project-member] [instance: b473237b-274d-450d-b5a1-63d78b08f6f0] Instance disappeared before build. {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 956.155681] env[68443]: DEBUG oslo_concurrency.lockutils [None req-300922a4-47ad-4696-9f3d-8947dface6dc tempest-ServersTestManualDisk-339913820 tempest-ServersTestManualDisk-339913820-project-member] Lock "b473237b-274d-450d-b5a1-63d78b08f6f0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 203.897s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 956.170447] env[68443]: DEBUG nova.compute.manager [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Starting instance... 
{{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 956.275050] env[68443]: DEBUG oslo_concurrency.lockutils [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 956.275050] env[68443]: DEBUG oslo_concurrency.lockutils [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 956.275050] env[68443]: INFO nova.compute.claims [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 956.777606] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d2f148f-1770-46cd-8759-83a6a6128c68 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.786184] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffff18f4-988c-4d82-b32f-ece70ae337a6 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.819516] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40e317c7-9616-4446-97db-cbdb5f1571d6 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.828355] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9795fd3d-8bce-4ebd-9246-9c6274887319 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.845710] env[68443]: DEBUG nova.compute.provider_tree [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 956.859229] env[68443]: DEBUG nova.scheduler.client.report [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:954}} [ 956.879594] env[68443]: DEBUG oslo_concurrency.lockutils [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.607s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 956.880914] env[68443]: DEBUG nova.compute.manager [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Start building networks asynchronously for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 956.928299] env[68443]: DEBUG nova.compute.utils [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Using /dev/sd instead of None {{(pid=68443) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 956.931234] env[68443]: DEBUG nova.compute.manager [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Allocating IP information in the background. {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 956.931234] env[68443]: DEBUG nova.network.neutron [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] allocate_for_instance() {{(pid=68443) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 956.945334] env[68443]: DEBUG nova.compute.manager [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Start building block device mappings for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 957.016132] env[68443]: DEBUG nova.compute.manager [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Start spawning the instance on the hypervisor. 
{{(pid=68443) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 957.032958] env[68443]: DEBUG nova.policy [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'db843ba923a047298d5a7853e87fc5c4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f8fc188bcadc4e989188f3d9f450de41', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68443) authorize /opt/stack/nova/nova/policy.py:203}} [ 957.045542] env[68443]: DEBUG nova.virt.hardware [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-08T18:48:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-08T18:48:12Z,direct_url=,disk_format='vmdk',id=a80a8b97-4d56-4702-9b02-9d115fcd6710,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f4f2a1e220914ec3b281775c224df247',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-08T18:48:13Z,virtual_size=,visibility=), allow threads: False {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 957.045792] env[68443]: DEBUG nova.virt.hardware [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Flavor limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 957.045951] env[68443]: DEBUG nova.virt.hardware [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Image limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 957.046149] env[68443]: DEBUG nova.virt.hardware [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Flavor pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 957.046299] env[68443]: DEBUG nova.virt.hardware [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Image pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 957.046445] env[68443]: DEBUG nova.virt.hardware [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68443) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 957.047380] env[68443]: DEBUG nova.virt.hardware [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 957.047625] env[68443]: DEBUG nova.virt.hardware [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 957.047887] env[68443]: DEBUG nova.virt.hardware [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Got 1 possible topologies {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 957.048399] env[68443]: DEBUG nova.virt.hardware [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 957.048656] env[68443]: DEBUG nova.virt.hardware [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 957.049607] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59acaa1d-834a-4007-9aff-a5e5483469e8 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.060994] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d445e451-7214-4a05-b239-a53ccda6b432 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.763268] env[68443]: DEBUG nova.network.neutron [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Successfully created port: 027e8f5e-a60f-4c86-b20d-931c48f743fd {{(pid=68443) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 957.826025] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 957.826025] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 957.826025] env[68443]: DEBUG nova.compute.manager [None 
req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Cleaning up deleted instances {{(pid=68443) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11219}} [ 957.844097] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] There are 0 instances to clean {{(pid=68443) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 957.844097] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 957.844269] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Cleaning up deleted instances with incomplete migration {{(pid=68443) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11257}} [ 957.859432] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 958.870663] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 959.007470] env[68443]: DEBUG nova.network.neutron [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Successfully updated port: 027e8f5e-a60f-4c86-b20d-931c48f743fd {{(pid=68443) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 959.025208] env[68443]: DEBUG oslo_concurrency.lockutils [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Acquiring lock "refresh_cache-1c1acc0d-263d-4687-93ff-291d18a592d8" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 959.026042] env[68443]: DEBUG oslo_concurrency.lockutils [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Acquired lock "refresh_cache-1c1acc0d-263d-4687-93ff-291d18a592d8" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 959.026259] env[68443]: DEBUG nova.network.neutron [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Building network info cache for instance {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 959.075150] env[68443]: DEBUG nova.network.neutron [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Instance cache missing network info. 
{{(pid=68443) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 959.263178] env[68443]: DEBUG nova.network.neutron [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Updating instance_info_cache with network_info: [{"id": "027e8f5e-a60f-4c86-b20d-931c48f743fd", "address": "fa:16:3e:6b:aa:74", "network": {"id": "d3d9988b-dea6-4b2d-a9cc-2dbfeb2cc237", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-630889970-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f8fc188bcadc4e989188f3d9f450de41", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "23f4655e-3495-421d-be4e-f6002a85a47a", "external-id": "nsx-vlan-transportzone-520", "segmentation_id": 520, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap027e8f5e-a6", "ovs_interfaceid": "027e8f5e-a60f-4c86-b20d-931c48f743fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 959.277414] env[68443]: DEBUG oslo_concurrency.lockutils [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Releasing lock "refresh_cache-1c1acc0d-263d-4687-93ff-291d18a592d8" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 959.277728] env[68443]: DEBUG nova.compute.manager [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Instance network_info: |[{"id": "027e8f5e-a60f-4c86-b20d-931c48f743fd", "address": "fa:16:3e:6b:aa:74", "network": {"id": "d3d9988b-dea6-4b2d-a9cc-2dbfeb2cc237", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-630889970-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f8fc188bcadc4e989188f3d9f450de41", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "23f4655e-3495-421d-be4e-f6002a85a47a", "external-id": "nsx-vlan-transportzone-520", "segmentation_id": 520, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap027e8f5e-a6", "ovs_interfaceid": "027e8f5e-a60f-4c86-b20d-931c48f743fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, 
"meta": {}}]| {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 959.278429] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6b:aa:74', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '23f4655e-3495-421d-be4e-f6002a85a47a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '027e8f5e-a60f-4c86-b20d-931c48f743fd', 'vif_model': 'vmxnet3'}] {{(pid=68443) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 959.292240] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Creating folder: Project (f8fc188bcadc4e989188f3d9f450de41). Parent ref: group-v673136. {{(pid=68443) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 959.292326] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0056d45a-ba6c-490d-964d-1973a7483c0b {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.304520] env[68443]: INFO nova.virt.vmwareapi.vm_util [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Created folder: Project (f8fc188bcadc4e989188f3d9f450de41) in parent group-v673136. [ 959.304773] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Creating folder: Instances. Parent ref: group-v673179. {{(pid=68443) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 959.305142] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-87f79c1d-a702-40c1-8573-b32614c430e2 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.314585] env[68443]: INFO nova.virt.vmwareapi.vm_util [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Created folder: Instances in parent group-v673179. [ 959.314830] env[68443]: DEBUG oslo.service.loopingcall [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 959.315043] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Creating VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 959.315238] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fc61cda4-86c9-48cf-a657-eebf31a4fb0c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.338659] env[68443]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 959.338659] env[68443]: value = "task-3373934" [ 959.338659] env[68443]: _type = "Task" [ 959.338659] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.346607] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3373934, 'name': CreateVM_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.817829] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c3fd19d0-26dd-4b93-bc80-53084f8b513d tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Acquiring lock "1c1acc0d-263d-4687-93ff-291d18a592d8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 959.851298] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3373934, 'name': CreateVM_Task, 'duration_secs': 0.340539} completed successfully. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.852105] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Created VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 959.854321] env[68443]: DEBUG oslo_concurrency.lockutils [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 959.854321] env[68443]: DEBUG oslo_concurrency.lockutils [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 959.854321] env[68443]: DEBUG oslo_concurrency.lockutils [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 959.854321] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-32e8232c-b8d2-47d5-b60b-518ae154cec3 {{(pid=68443) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.858906] env[68443]: DEBUG oslo_vmware.api [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Waiting for the task: (returnval){ [ 959.858906] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]5252b3c5-8212-43ef-b2c8-c732a436ad1f" [ 959.858906] env[68443]: _type = "Task" [ 959.858906] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.867838] env[68443]: DEBUG oslo_vmware.api [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]5252b3c5-8212-43ef-b2c8-c732a436ad1f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.373506] env[68443]: DEBUG oslo_concurrency.lockutils [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 960.375198] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Processing image a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 960.375198] env[68443]: DEBUG oslo_concurrency.lockutils [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 960.612041] env[68443]: DEBUG nova.compute.manager [req-5a60ba11-04fa-40d8-bfc4-ac4211d9f66c req-8486863f-75b7-4e1a-8d6f-6f47de83f741 service nova] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Received event network-vif-plugged-027e8f5e-a60f-4c86-b20d-931c48f743fd {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 960.612263] env[68443]: DEBUG oslo_concurrency.lockutils [req-5a60ba11-04fa-40d8-bfc4-ac4211d9f66c req-8486863f-75b7-4e1a-8d6f-6f47de83f741 service nova] Acquiring lock "1c1acc0d-263d-4687-93ff-291d18a592d8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 960.612488] env[68443]: DEBUG oslo_concurrency.lockutils [req-5a60ba11-04fa-40d8-bfc4-ac4211d9f66c req-8486863f-75b7-4e1a-8d6f-6f47de83f741 service nova] Lock "1c1acc0d-263d-4687-93ff-291d18a592d8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
960.612670] env[68443]: DEBUG oslo_concurrency.lockutils [req-5a60ba11-04fa-40d8-bfc4-ac4211d9f66c req-8486863f-75b7-4e1a-8d6f-6f47de83f741 service nova] Lock "1c1acc0d-263d-4687-93ff-291d18a592d8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 960.612841] env[68443]: DEBUG nova.compute.manager [req-5a60ba11-04fa-40d8-bfc4-ac4211d9f66c req-8486863f-75b7-4e1a-8d6f-6f47de83f741 service nova] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] No waiting events found dispatching network-vif-plugged-027e8f5e-a60f-4c86-b20d-931c48f743fd {{(pid=68443) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 960.614066] env[68443]: WARNING nova.compute.manager [req-5a60ba11-04fa-40d8-bfc4-ac4211d9f66c req-8486863f-75b7-4e1a-8d6f-6f47de83f741 service nova] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Received unexpected event network-vif-plugged-027e8f5e-a60f-4c86-b20d-931c48f743fd for instance with vm_state building and task_state deleting. [ 960.825026] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 960.825193] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 960.825263] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 960.825406] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68443) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 961.826608] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 961.826608] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Starting heal instance info cache {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 961.826608] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Rebuilding the list of instances to heal {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 961.851277] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Skipping network cache update for instance because it is Building. 
{{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 961.851277] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 961.851277] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 961.851277] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 961.851277] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 961.851833] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 63801b63-1601-4e77-a500-3569713177bd] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 961.853232] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 6df57929-1115-4080-8131-8960525eb833] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 961.853513] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 961.853663] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 961.853793] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 961.853916] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Didn't find any instances for network info cache update. 
{{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 961.854519] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 961.870023] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 961.873831] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 961.873831] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 961.873831] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68443) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 961.873831] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8b60935-789f-45de-823d-0e85f07f9afb {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.881436] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-269f05bd-f7fd-43f7-9b1d-7c7179da709a {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.902877] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98a6bcb5-9c99-4f5b-b388-05866424bc76 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.910644] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a11fee0-165d-499b-b9ff-d29791a73e5b {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.944559] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181011MB free_disk=105GB free_vcpus=48 pci_devices=None {{(pid=68443) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 961.944733] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 961.944936] 
env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 962.040291] env[68443]: DEBUG oslo_concurrency.lockutils [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Acquiring lock "fd0de9a2-7a54-46be-8b6a-3415366e110c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 962.040598] env[68443]: DEBUG oslo_concurrency.lockutils [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Lock "fd0de9a2-7a54-46be-8b6a-3415366e110c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 962.147786] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 962.148044] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance f5aa2b1b-c290-42f2-84d3-272415184f14 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 962.148230] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 280e1cc5-91db-4a03-bca4-b2d2e4ddd221 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 962.148373] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 8fdbd88f-f608-4f26-9076-7d2f6eb67224 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 962.148499] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 08a980e1-ca8e-4af3-afbf-bd688e11259f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 962.148619] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 63801b63-1601-4e77-a500-3569713177bd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 962.148737] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 6df57929-1115-4080-8131-8960525eb833 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 962.148852] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 844f2b9d-ad2a-431a-a587-65ba446d571f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 962.148966] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance cee290e6-66e3-4d2e-a9bb-f93db33eaaaa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 962.149091] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 1c1acc0d-263d-4687-93ff-291d18a592d8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 962.164410] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance b436b172-7da8-4753-9e5c-896675ae640c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 962.179363] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 0cf57946-5db0-4c7c-a537-3f35e50c231f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 962.197296] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 960a8177-a962-471b-850c-7fdf16544cbe has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 962.213335] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 0cab7c98-d6d4-401f-a8a3-6aeb7c4ae3e9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 962.225447] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance bc6ee2e0-ed65-4c30-bb37-28436f8d487d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 962.237151] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance a79985f5-4be3-4b95-a3d3-339b7f25b9e9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 962.251257] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 67780abc-a216-41d2-b531-d31aef150587 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 962.265313] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 962.282023] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance aabad6b5-1bf9-44ed-8fee-60a06c46f890 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 962.298155] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 3842d98e-d971-456c-b287-53c513285acf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 962.314710] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 6e162408-6d3d-42e0-8992-f5843e9e7855 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 962.327597] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 767f1f71-6b02-4b88-83bf-dcbf9d87b895 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 962.341961] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 95bd5db1-4fff-45a2-84a0-6dd35c8463a2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 962.358531] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance ff1a64cd-c34a-4907-a40d-ddfbb28dbc7a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 962.375525] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance fd0de9a2-7a54-46be-8b6a-3415366e110c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 962.375525] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 962.375525] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 962.397366] env[68443]: DEBUG nova.scheduler.client.report [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Refreshing inventories for resource provider feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 962.418967] env[68443]: DEBUG nova.scheduler.client.report [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Updating ProviderTree inventory for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 962.418967] env[68443]: DEBUG nova.compute.provider_tree [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Updating inventory in ProviderTree for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 962.434171] env[68443]: DEBUG nova.scheduler.client.report [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Refreshing aggregate associations for resource provider feda0f0b-e324-4b78-af74-5e6cfd355a37, aggregates: None {{(pid=68443) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 962.463656] env[68443]: DEBUG nova.scheduler.client.report [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Refreshing trait associations for resource provider feda0f0b-e324-4b78-af74-5e6cfd355a37, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=68443) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 962.894295] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83515c3a-bfb4-4150-8457-af5e2700544e {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.902544] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-7ce38b81-5be1-4099-a7ca-1fdf5a1e93a3 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.933652] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2a8c5dd-5aa0-4ebf-8c12-d608e9a677c9 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.941259] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d06bd7a-bab0-4818-8fe9-bb8d06db4627 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.954432] env[68443]: DEBUG nova.compute.provider_tree [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 962.964113] env[68443]: DEBUG nova.scheduler.client.report [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 962.985690] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68443) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 962.985930] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.041s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 963.981696] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 963.982160] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 964.347477] env[68443]: DEBUG nova.compute.manager [req-6ec0dffa-73a8-4cec-abe7-fe3fc3a20bb6 req-5a79cdd0-6600-4ff6-84a7-34aab12655dc service nova] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Received event network-changed-027e8f5e-a60f-4c86-b20d-931c48f743fd {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 964.347675] env[68443]: DEBUG nova.compute.manager [req-6ec0dffa-73a8-4cec-abe7-fe3fc3a20bb6 req-5a79cdd0-6600-4ff6-84a7-34aab12655dc service nova] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Refreshing instance network info cache due to event 
network-changed-027e8f5e-a60f-4c86-b20d-931c48f743fd. {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 964.347890] env[68443]: DEBUG oslo_concurrency.lockutils [req-6ec0dffa-73a8-4cec-abe7-fe3fc3a20bb6 req-5a79cdd0-6600-4ff6-84a7-34aab12655dc service nova] Acquiring lock "refresh_cache-1c1acc0d-263d-4687-93ff-291d18a592d8" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 964.348038] env[68443]: DEBUG oslo_concurrency.lockutils [req-6ec0dffa-73a8-4cec-abe7-fe3fc3a20bb6 req-5a79cdd0-6600-4ff6-84a7-34aab12655dc service nova] Acquired lock "refresh_cache-1c1acc0d-263d-4687-93ff-291d18a592d8" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 964.348196] env[68443]: DEBUG nova.network.neutron [req-6ec0dffa-73a8-4cec-abe7-fe3fc3a20bb6 req-5a79cdd0-6600-4ff6-84a7-34aab12655dc service nova] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Refreshing network info cache for port 027e8f5e-a60f-4c86-b20d-931c48f743fd {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 964.896253] env[68443]: DEBUG nova.network.neutron [req-6ec0dffa-73a8-4cec-abe7-fe3fc3a20bb6 req-5a79cdd0-6600-4ff6-84a7-34aab12655dc service nova] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Updated VIF entry in instance network info cache for port 027e8f5e-a60f-4c86-b20d-931c48f743fd. {{(pid=68443) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 964.896253] env[68443]: DEBUG nova.network.neutron [req-6ec0dffa-73a8-4cec-abe7-fe3fc3a20bb6 req-5a79cdd0-6600-4ff6-84a7-34aab12655dc service nova] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Updating instance_info_cache with network_info: [{"id": "027e8f5e-a60f-4c86-b20d-931c48f743fd", "address": "fa:16:3e:6b:aa:74", "network": {"id": "d3d9988b-dea6-4b2d-a9cc-2dbfeb2cc237", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-630889970-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f8fc188bcadc4e989188f3d9f450de41", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "23f4655e-3495-421d-be4e-f6002a85a47a", "external-id": "nsx-vlan-transportzone-520", "segmentation_id": 520, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap027e8f5e-a6", "ovs_interfaceid": "027e8f5e-a60f-4c86-b20d-931c48f743fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 964.909649] env[68443]: DEBUG oslo_concurrency.lockutils [req-6ec0dffa-73a8-4cec-abe7-fe3fc3a20bb6 req-5a79cdd0-6600-4ff6-84a7-34aab12655dc service nova] Releasing lock "refresh_cache-1c1acc0d-263d-4687-93ff-291d18a592d8" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 969.558127] env[68443]: DEBUG oslo_concurrency.lockutils [None req-cbde4992-026f-49d4-92d1-2394747ecb7c 
tempest-ServersNegativeTestMultiTenantJSON-1168988682 tempest-ServersNegativeTestMultiTenantJSON-1168988682-project-member] Acquiring lock "9ad6308e-05f6-4070-bbda-500e043265f0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 969.558443] env[68443]: DEBUG oslo_concurrency.lockutils [None req-cbde4992-026f-49d4-92d1-2394747ecb7c tempest-ServersNegativeTestMultiTenantJSON-1168988682 tempest-ServersNegativeTestMultiTenantJSON-1168988682-project-member] Lock "9ad6308e-05f6-4070-bbda-500e043265f0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 969.938193] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a27ab00b-22ac-4ee6-8277-d7287f98d8ed tempest-SecurityGroupsTestJSON-1267966769 tempest-SecurityGroupsTestJSON-1267966769-project-member] Acquiring lock "e8c14375-4019-4d0d-9f98-6732a55faa89" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 969.938428] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a27ab00b-22ac-4ee6-8277-d7287f98d8ed tempest-SecurityGroupsTestJSON-1267966769 tempest-SecurityGroupsTestJSON-1267966769-project-member] Lock "e8c14375-4019-4d0d-9f98-6732a55faa89" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 971.729384] env[68443]: DEBUG oslo_concurrency.lockutils [None req-1711075c-551b-49a9-95bd-3cc1c7c03de9 tempest-AttachVolumeNegativeTest-1393756799 tempest-AttachVolumeNegativeTest-1393756799-project-member] Acquiring lock "88ce1e8e-b0d3-4c98-8efe-c0258ef1b606" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 971.729677] env[68443]: DEBUG oslo_concurrency.lockutils [None req-1711075c-551b-49a9-95bd-3cc1c7c03de9 tempest-AttachVolumeNegativeTest-1393756799 tempest-AttachVolumeNegativeTest-1393756799-project-member] Lock "88ce1e8e-b0d3-4c98-8efe-c0258ef1b606" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 980.084242] env[68443]: DEBUG oslo_concurrency.lockutils [None req-055543e0-11d9-42da-b447-2c86344c5477 tempest-ServersTestFqdnHostnames-23424126 tempest-ServersTestFqdnHostnames-23424126-project-member] Acquiring lock "0591711b-fa55-403d-b8cc-c055b8867214" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 980.084605] env[68443]: DEBUG oslo_concurrency.lockutils [None req-055543e0-11d9-42da-b447-2c86344c5477 tempest-ServersTestFqdnHostnames-23424126 tempest-ServersTestFqdnHostnames-23424126-project-member] Lock "0591711b-fa55-403d-b8cc-c055b8867214" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 985.650684] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c7b2aef3-e736-46ee-8ce7-e09d3950e5c6 tempest-ServerRescueTestJSONUnderV235-759347534 tempest-ServerRescueTestJSONUnderV235-759347534-project-member] Acquiring lock "f3b037d7-2a19-498f-b60e-7ac172411054" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 985.650991] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c7b2aef3-e736-46ee-8ce7-e09d3950e5c6 tempest-ServerRescueTestJSONUnderV235-759347534 tempest-ServerRescueTestJSONUnderV235-759347534-project-member] Lock "f3b037d7-2a19-498f-b60e-7ac172411054" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 988.897541] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c8e57b53-27af-4072-bbb7-129551ef0dd1 tempest-ServerMetadataTestJSON-159372343 tempest-ServerMetadataTestJSON-159372343-project-member] Acquiring lock "1f385709-d38c-42bc-b250-e4260f7c8af8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 988.897898] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c8e57b53-27af-4072-bbb7-129551ef0dd1 tempest-ServerMetadataTestJSON-159372343 tempest-ServerMetadataTestJSON-159372343-project-member] Lock "1f385709-d38c-42bc-b250-e4260f7c8af8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 994.429638] env[68443]: DEBUG oslo_concurrency.lockutils [None req-1dc28f3d-a30d-4823-9635-42f4bf0e4d10 tempest-ListServersNegativeTestJSON-1964244822 tempest-ListServersNegativeTestJSON-1964244822-project-member] Acquiring lock "aa464980-860e-4c6f-a732-83d75503e4c9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 994.429942] env[68443]: DEBUG oslo_concurrency.lockutils [None req-1dc28f3d-a30d-4823-9635-42f4bf0e4d10 tempest-ListServersNegativeTestJSON-1964244822 tempest-ListServersNegativeTestJSON-1964244822-project-member] Lock "aa464980-860e-4c6f-a732-83d75503e4c9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 994.453941] env[68443]: DEBUG oslo_concurrency.lockutils [None req-1dc28f3d-a30d-4823-9635-42f4bf0e4d10 tempest-ListServersNegativeTestJSON-1964244822 tempest-ListServersNegativeTestJSON-1964244822-project-member] Acquiring lock "3ec222f1-9af7-46f8-97ff-27a8f96bd4d8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 994.454537] env[68443]: DEBUG oslo_concurrency.lockutils [None req-1dc28f3d-a30d-4823-9635-42f4bf0e4d10 tempest-ListServersNegativeTestJSON-1964244822 tempest-ListServersNegativeTestJSON-1964244822-project-member] Lock "3ec222f1-9af7-46f8-97ff-27a8f96bd4d8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 994.482891] env[68443]: DEBUG oslo_concurrency.lockutils [None req-1dc28f3d-a30d-4823-9635-42f4bf0e4d10 tempest-ListServersNegativeTestJSON-1964244822 tempest-ListServersNegativeTestJSON-1964244822-project-member] Acquiring lock "7d79fd13-4514-4fef-b953-4d85af6af40d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 994.483063] env[68443]: DEBUG oslo_concurrency.lockutils [None req-1dc28f3d-a30d-4823-9635-42f4bf0e4d10 tempest-ListServersNegativeTestJSON-1964244822 tempest-ListServersNegativeTestJSON-1964244822-project-member] Lock "7d79fd13-4514-4fef-b953-4d85af6af40d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1000.807418] env[68443]: DEBUG oslo_concurrency.lockutils [None req-8c75c97b-6d9f-407b-a644-d28d29e2e261 tempest-ServerActionsV293TestJSON-2331396 tempest-ServerActionsV293TestJSON-2331396-project-member] Acquiring lock "c7488d31-9e3c-42d5-8bdc-51919522d556" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1000.807860] env[68443]: DEBUG oslo_concurrency.lockutils [None req-8c75c97b-6d9f-407b-a644-d28d29e2e261 tempest-ServerActionsV293TestJSON-2331396 tempest-ServerActionsV293TestJSON-2331396-project-member] Lock "c7488d31-9e3c-42d5-8bdc-51919522d556" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1002.945049] env[68443]: WARNING oslo_vmware.rw_handles [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1002.945049] env[68443]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1002.945049] env[68443]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1002.945049] env[68443]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1002.945049] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1002.945049] env[68443]: ERROR oslo_vmware.rw_handles response.begin() [ 1002.945049] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1002.945049] env[68443]: ERROR oslo_vmware.rw_handles version, status, 
reason = self._read_status() [ 1002.945049] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1002.945049] env[68443]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1002.945049] env[68443]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1002.945049] env[68443]: ERROR oslo_vmware.rw_handles [ 1002.946134] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Downloaded image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to vmware_temp/c4296a8e-ce84-4371-990e-7e714764cadf/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1002.947188] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Caching image {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1002.947426] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Copying Virtual Disk [datastore1] vmware_temp/c4296a8e-ce84-4371-990e-7e714764cadf/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk to [datastore1] vmware_temp/c4296a8e-ce84-4371-990e-7e714764cadf/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk {{(pid=68443) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1002.947714] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-adfaf0ad-717c-4426-838c-d46af09c0f8f {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.957906] env[68443]: DEBUG oslo_vmware.api [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Waiting for the task: (returnval){ [ 1002.957906] env[68443]: value = "task-3373946" [ 1002.957906] env[68443]: _type = "Task" [ 1002.957906] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.966479] env[68443]: DEBUG oslo_vmware.api [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Task: {'id': task-3373946, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.468387] env[68443]: DEBUG oslo_vmware.exceptions [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Fault InvalidArgument not matched. 
{{(pid=68443) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1003.468740] env[68443]: DEBUG oslo_concurrency.lockutils [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1003.469342] env[68443]: ERROR nova.compute.manager [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1003.469342] env[68443]: Faults: ['InvalidArgument'] [ 1003.469342] env[68443]: ERROR nova.compute.manager [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Traceback (most recent call last): [ 1003.469342] env[68443]: ERROR nova.compute.manager [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1003.469342] env[68443]: ERROR nova.compute.manager [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] yield resources [ 1003.469342] env[68443]: ERROR nova.compute.manager [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1003.469342] env[68443]: ERROR nova.compute.manager [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] self.driver.spawn(context, instance, image_meta, [ 1003.469342] env[68443]: ERROR nova.compute.manager [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1003.469342] env[68443]: ERROR nova.compute.manager [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1003.469342] env[68443]: ERROR nova.compute.manager [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1003.469342] env[68443]: ERROR nova.compute.manager [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] self._fetch_image_if_missing(context, vi) [ 1003.469342] env[68443]: ERROR nova.compute.manager [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1003.469779] env[68443]: ERROR nova.compute.manager [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] image_cache(vi, tmp_image_ds_loc) [ 1003.469779] env[68443]: ERROR nova.compute.manager [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1003.469779] env[68443]: ERROR nova.compute.manager [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] vm_util.copy_virtual_disk( [ 1003.469779] env[68443]: ERROR nova.compute.manager [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1003.469779] env[68443]: ERROR nova.compute.manager [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] session._wait_for_task(vmdk_copy_task) [ 1003.469779] env[68443]: ERROR nova.compute.manager [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1003.469779] env[68443]: ERROR nova.compute.manager [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] return self.wait_for_task(task_ref) [ 1003.469779] env[68443]: ERROR nova.compute.manager [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1003.469779] env[68443]: ERROR nova.compute.manager [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] return evt.wait() [ 1003.469779] env[68443]: ERROR nova.compute.manager [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1003.469779] env[68443]: ERROR nova.compute.manager [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] result = hub.switch() [ 1003.469779] env[68443]: ERROR nova.compute.manager [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1003.469779] env[68443]: ERROR nova.compute.manager [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] return self.greenlet.switch() [ 1003.470162] env[68443]: ERROR nova.compute.manager [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1003.470162] env[68443]: ERROR nova.compute.manager [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] self.f(*self.args, **self.kw) [ 1003.470162] env[68443]: ERROR nova.compute.manager [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1003.470162] env[68443]: ERROR nova.compute.manager [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] raise exceptions.translate_fault(task_info.error) [ 1003.470162] env[68443]: ERROR nova.compute.manager [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1003.470162] env[68443]: ERROR nova.compute.manager [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Faults: ['InvalidArgument'] [ 1003.470162] env[68443]: ERROR nova.compute.manager [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] [ 1003.470162] env[68443]: INFO nova.compute.manager [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Terminating instance [ 1003.471301] env[68443]: DEBUG oslo_concurrency.lockutils [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1003.471534] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1003.472195] env[68443]: DEBUG nova.compute.manager [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c 
tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1003.472388] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1003.472625] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d3935a55-9c0d-4553-847b-1757a6aada31 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.474992] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-164bd05d-5b76-4a40-9eed-24114013be9c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.482329] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Unregistering the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1003.482546] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-997c398f-83b5-472c-9cc2-d2435b8ac1d1 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.484835] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1003.485032] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68443) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1003.485989] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e163b41-82bf-412b-ad35-848a903751a7 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.491129] env[68443]: DEBUG oslo_vmware.api [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Waiting for the task: (returnval){ [ 1003.491129] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]529f6700-8840-e625-b035-54f1711ffac9" [ 1003.491129] env[68443]: _type = "Task" [ 1003.491129] env[68443]: } to complete. 
{{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.498852] env[68443]: DEBUG oslo_vmware.api [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]529f6700-8840-e625-b035-54f1711ffac9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.557755] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Unregistered the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1003.557985] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Deleting contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1003.558183] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Deleting the datastore file [datastore1] 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909 {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1003.558455] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-14728c9a-fabe-44ef-893a-eb39114aefcd {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.565718] env[68443]: DEBUG oslo_vmware.api [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Waiting for the task: (returnval){ [ 1003.565718] env[68443]: value = "task-3373948" [ 1003.565718] env[68443]: _type = "Task" [ 1003.565718] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.574116] env[68443]: DEBUG oslo_vmware.api [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Task: {'id': task-3373948, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.003090] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Preparing fetch location {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1004.003403] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Creating directory with path [datastore1] vmware_temp/4aee9cf5-9204-4fce-add4-9f01a3e715ec/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1004.003736] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-aff42388-70bd-4174-8f8a-23dd94a83814 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.016173] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Created directory with path [datastore1] vmware_temp/4aee9cf5-9204-4fce-add4-9f01a3e715ec/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1004.016383] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Fetch image to [datastore1] vmware_temp/4aee9cf5-9204-4fce-add4-9f01a3e715ec/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1004.016590] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to [datastore1] vmware_temp/4aee9cf5-9204-4fce-add4-9f01a3e715ec/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1004.017350] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-326a8cbd-c242-41d1-97fe-d7920a7351ed {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.024366] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02cadea6-6ee5-4395-81d5-11e13fa8f6bb {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.033896] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0433300-89b1-4ea1-a346-d757e6a49977 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.069020] env[68443]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90919a3f-0ef9-42a1-9caf-a00b775db121 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.078424] env[68443]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-77b0b2ac-e57d-4912-9bf2-cfa7244dd59a {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.080325] env[68443]: DEBUG oslo_vmware.api [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Task: {'id': task-3373948, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.066342} completed successfully. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.080730] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Deleted the datastore file {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1004.081076] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Deleted contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1004.081403] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1004.083038] env[68443]: INFO nova.compute.manager [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 1004.083951] env[68443]: DEBUG nova.compute.claims [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Aborting claim: {{(pid=68443) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1004.084185] env[68443]: DEBUG oslo_concurrency.lockutils [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1004.084448] env[68443]: DEBUG oslo_concurrency.lockutils [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1004.116985] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1004.185244] env[68443]: DEBUG oslo_vmware.rw_handles [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4aee9cf5-9204-4fce-add4-9f01a3e715ec/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68443) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1004.250271] env[68443]: DEBUG oslo_vmware.rw_handles [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Completed reading data from the image iterator. {{(pid=68443) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1004.250527] env[68443]: DEBUG oslo_vmware.rw_handles [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4aee9cf5-9204-4fce-add4-9f01a3e715ec/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68443) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1004.532258] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49282995-54c2-4aba-a011-c7c8d04eb6ad {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.540169] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fcdb6d0-a92b-4657-b99a-54dec57e9cb8 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.570131] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a413e7fa-1e00-4479-a99e-6226d4b094a0 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.579307] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b8eaffc-02e1-4c94-a860-674287a9ba70 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.594400] env[68443]: DEBUG nova.compute.provider_tree [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1004.603985] env[68443]: DEBUG nova.scheduler.client.report [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1004.621066] env[68443]: DEBUG oslo_concurrency.lockutils [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.536s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1004.621610] env[68443]: ERROR nova.compute.manager [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1004.621610] env[68443]: Faults: ['InvalidArgument'] [ 1004.621610] env[68443]: ERROR nova.compute.manager [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Traceback (most recent call last): [ 1004.621610] env[68443]: ERROR nova.compute.manager [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in 
_build_and_run_instance [ 1004.621610] env[68443]: ERROR nova.compute.manager [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] self.driver.spawn(context, instance, image_meta, [ 1004.621610] env[68443]: ERROR nova.compute.manager [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1004.621610] env[68443]: ERROR nova.compute.manager [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1004.621610] env[68443]: ERROR nova.compute.manager [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1004.621610] env[68443]: ERROR nova.compute.manager [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] self._fetch_image_if_missing(context, vi) [ 1004.621610] env[68443]: ERROR nova.compute.manager [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1004.621610] env[68443]: ERROR nova.compute.manager [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] image_cache(vi, tmp_image_ds_loc) [ 1004.621610] env[68443]: ERROR nova.compute.manager [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1004.622072] env[68443]: ERROR nova.compute.manager [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] vm_util.copy_virtual_disk( [ 1004.622072] env[68443]: ERROR nova.compute.manager [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1004.622072] env[68443]: ERROR nova.compute.manager [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] session._wait_for_task(vmdk_copy_task) [ 1004.622072] env[68443]: ERROR nova.compute.manager [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1004.622072] env[68443]: ERROR nova.compute.manager [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] return self.wait_for_task(task_ref) [ 1004.622072] env[68443]: ERROR nova.compute.manager [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1004.622072] env[68443]: ERROR nova.compute.manager [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] return evt.wait() [ 1004.622072] env[68443]: ERROR nova.compute.manager [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1004.622072] env[68443]: ERROR nova.compute.manager [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] result = hub.switch() [ 1004.622072] env[68443]: ERROR nova.compute.manager [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1004.622072] env[68443]: ERROR nova.compute.manager [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] return self.greenlet.switch() [ 1004.622072] env[68443]: ERROR nova.compute.manager [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1004.622072] env[68443]: ERROR nova.compute.manager [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] self.f(*self.args, **self.kw) [ 1004.622462] env[68443]: ERROR nova.compute.manager [instance: 
5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1004.622462] env[68443]: ERROR nova.compute.manager [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] raise exceptions.translate_fault(task_info.error) [ 1004.622462] env[68443]: ERROR nova.compute.manager [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1004.622462] env[68443]: ERROR nova.compute.manager [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Faults: ['InvalidArgument'] [ 1004.622462] env[68443]: ERROR nova.compute.manager [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] [ 1004.622462] env[68443]: DEBUG nova.compute.utils [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] VimFaultException {{(pid=68443) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1004.623718] env[68443]: DEBUG nova.compute.manager [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Build of instance 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909 was re-scheduled: A specified parameter was not correct: fileType [ 1004.623718] env[68443]: Faults: ['InvalidArgument'] {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1004.624104] env[68443]: DEBUG nova.compute.manager [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Unplugging VIFs for instance {{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1004.624278] env[68443]: DEBUG nova.compute.manager [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1004.624434] env[68443]: DEBUG nova.compute.manager [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1004.624596] env[68443]: DEBUG nova.network.neutron [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1004.952248] env[68443]: DEBUG nova.network.neutron [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1004.962958] env[68443]: INFO nova.compute.manager [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Took 0.34 seconds to deallocate network for instance. [ 1005.073893] env[68443]: INFO nova.scheduler.client.report [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Deleted allocations for instance 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909 [ 1005.092234] env[68443]: DEBUG oslo_concurrency.lockutils [None req-b57c37a8-6a4a-4b69-b8a9-cb3e0e2de29c tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Lock "5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 287.707s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1005.093234] env[68443]: DEBUG oslo_concurrency.lockutils [None req-04c5b72b-0f5b-439b-bc52-3b3953721073 tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Lock "5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 90.236s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1005.094052] env[68443]: DEBUG oslo_concurrency.lockutils [None req-04c5b72b-0f5b-439b-bc52-3b3953721073 tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Acquiring lock "5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1005.094052] env[68443]: DEBUG oslo_concurrency.lockutils [None req-04c5b72b-0f5b-439b-bc52-3b3953721073 tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Lock "5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909-events" 
acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1005.094052] env[68443]: DEBUG oslo_concurrency.lockutils [None req-04c5b72b-0f5b-439b-bc52-3b3953721073 tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Lock "5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1005.096389] env[68443]: INFO nova.compute.manager [None req-04c5b72b-0f5b-439b-bc52-3b3953721073 tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Terminating instance [ 1005.098113] env[68443]: DEBUG nova.compute.manager [None req-04c5b72b-0f5b-439b-bc52-3b3953721073 tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1005.098401] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-04c5b72b-0f5b-439b-bc52-3b3953721073 tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1005.098776] env[68443]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-96ae1623-7302-4366-9b5e-714567469437 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.109066] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-937bf5c3-3d1b-418a-9eca-40ed63a2728c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.122104] env[68443]: DEBUG nova.compute.manager [None req-1d26a334-e904-4e30-9c40-23dbea2eb2ba tempest-ImagesOneServerNegativeTestJSON-1627556834 tempest-ImagesOneServerNegativeTestJSON-1627556834-project-member] [instance: b436b172-7da8-4753-9e5c-896675ae640c] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1005.146916] env[68443]: WARNING nova.virt.vmwareapi.vmops [None req-04c5b72b-0f5b-439b-bc52-3b3953721073 tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909 could not be found. 
[ 1005.147587] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-04c5b72b-0f5b-439b-bc52-3b3953721073 tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1005.147587] env[68443]: INFO nova.compute.manager [None req-04c5b72b-0f5b-439b-bc52-3b3953721073 tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1005.147753] env[68443]: DEBUG oslo.service.loopingcall [None req-04c5b72b-0f5b-439b-bc52-3b3953721073 tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1005.148937] env[68443]: DEBUG nova.compute.manager [-] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1005.148937] env[68443]: DEBUG nova.network.neutron [-] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1005.151367] env[68443]: DEBUG nova.compute.manager [None req-1d26a334-e904-4e30-9c40-23dbea2eb2ba tempest-ImagesOneServerNegativeTestJSON-1627556834 tempest-ImagesOneServerNegativeTestJSON-1627556834-project-member] [instance: b436b172-7da8-4753-9e5c-896675ae640c] Instance disappeared before build. {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1005.173674] env[68443]: DEBUG oslo_concurrency.lockutils [None req-1d26a334-e904-4e30-9c40-23dbea2eb2ba tempest-ImagesOneServerNegativeTestJSON-1627556834 tempest-ImagesOneServerNegativeTestJSON-1627556834-project-member] Lock "b436b172-7da8-4753-9e5c-896675ae640c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 242.228s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1005.177290] env[68443]: DEBUG nova.network.neutron [-] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1005.184234] env[68443]: DEBUG nova.compute.manager [None req-4e70ac1d-1955-44f2-a0a0-d24892f236af tempest-FloatingIPsAssociationNegativeTestJSON-1020643258 tempest-FloatingIPsAssociationNegativeTestJSON-1020643258-project-member] [instance: 0cf57946-5db0-4c7c-a537-3f35e50c231f] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1005.187355] env[68443]: INFO nova.compute.manager [-] [instance: 5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909] Took 0.04 seconds to deallocate network for instance. 
[ 1005.220269] env[68443]: DEBUG nova.compute.manager [None req-4e70ac1d-1955-44f2-a0a0-d24892f236af tempest-FloatingIPsAssociationNegativeTestJSON-1020643258 tempest-FloatingIPsAssociationNegativeTestJSON-1020643258-project-member] [instance: 0cf57946-5db0-4c7c-a537-3f35e50c231f] Instance disappeared before build. {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1005.247179] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4e70ac1d-1955-44f2-a0a0-d24892f236af tempest-FloatingIPsAssociationNegativeTestJSON-1020643258 tempest-FloatingIPsAssociationNegativeTestJSON-1020643258-project-member] Lock "0cf57946-5db0-4c7c-a537-3f35e50c231f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 241.609s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1005.257946] env[68443]: DEBUG nova.compute.manager [None req-a0327573-49f9-4745-aa6c-a13a9b1ef3a3 tempest-ServersTestJSON-1212411784 tempest-ServersTestJSON-1212411784-project-member] [instance: 960a8177-a962-471b-850c-7fdf16544cbe] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1005.282808] env[68443]: DEBUG nova.compute.manager [None req-a0327573-49f9-4745-aa6c-a13a9b1ef3a3 tempest-ServersTestJSON-1212411784 tempest-ServersTestJSON-1212411784-project-member] [instance: 960a8177-a962-471b-850c-7fdf16544cbe] Instance disappeared before build. {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1005.316267] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a0327573-49f9-4745-aa6c-a13a9b1ef3a3 tempest-ServersTestJSON-1212411784 tempest-ServersTestJSON-1212411784-project-member] Lock "960a8177-a962-471b-850c-7fdf16544cbe" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 236.864s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1005.322506] env[68443]: DEBUG oslo_concurrency.lockutils [None req-04c5b72b-0f5b-439b-bc52-3b3953721073 tempest-ServersAdminNegativeTestJSON-312218638 tempest-ServersAdminNegativeTestJSON-312218638-project-member] Lock "5ebb83ca-5c3e-40c2-a9c3-9c1f182a1909" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.229s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1005.328316] env[68443]: DEBUG nova.compute.manager [None req-fd5b3b0b-5e1a-47ff-b27e-dbc1314c1ebd tempest-ServerDiagnosticsNegativeTest-621827723 tempest-ServerDiagnosticsNegativeTest-621827723-project-member] [instance: 0cab7c98-d6d4-401f-a8a3-6aeb7c4ae3e9] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1005.368413] env[68443]: DEBUG nova.compute.manager [None req-fd5b3b0b-5e1a-47ff-b27e-dbc1314c1ebd tempest-ServerDiagnosticsNegativeTest-621827723 tempest-ServerDiagnosticsNegativeTest-621827723-project-member] [instance: 0cab7c98-d6d4-401f-a8a3-6aeb7c4ae3e9] Instance disappeared before build. 
{{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1005.391181] env[68443]: DEBUG oslo_concurrency.lockutils [None req-fd5b3b0b-5e1a-47ff-b27e-dbc1314c1ebd tempest-ServerDiagnosticsNegativeTest-621827723 tempest-ServerDiagnosticsNegativeTest-621827723-project-member] Lock "0cab7c98-d6d4-401f-a8a3-6aeb7c4ae3e9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 236.477s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1005.400374] env[68443]: DEBUG nova.compute.manager [None req-34fa30bf-596f-403d-bae3-c4edd8f220c7 tempest-ServerActionsTestOtherB-1356055185 tempest-ServerActionsTestOtherB-1356055185-project-member] [instance: bc6ee2e0-ed65-4c30-bb37-28436f8d487d] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1005.429776] env[68443]: DEBUG nova.compute.manager [None req-34fa30bf-596f-403d-bae3-c4edd8f220c7 tempest-ServerActionsTestOtherB-1356055185 tempest-ServerActionsTestOtherB-1356055185-project-member] [instance: bc6ee2e0-ed65-4c30-bb37-28436f8d487d] Instance disappeared before build. {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1005.450750] env[68443]: DEBUG oslo_concurrency.lockutils [None req-34fa30bf-596f-403d-bae3-c4edd8f220c7 tempest-ServerActionsTestOtherB-1356055185 tempest-ServerActionsTestOtherB-1356055185-project-member] Lock "bc6ee2e0-ed65-4c30-bb37-28436f8d487d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 234.072s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1005.459720] env[68443]: DEBUG nova.compute.manager [None req-36d31862-1183-414d-b4f0-a1d408c292de tempest-ServersListShow296Test-31630010 tempest-ServersListShow296Test-31630010-project-member] [instance: a79985f5-4be3-4b95-a3d3-339b7f25b9e9] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1005.486300] env[68443]: DEBUG nova.compute.manager [None req-36d31862-1183-414d-b4f0-a1d408c292de tempest-ServersListShow296Test-31630010 tempest-ServersListShow296Test-31630010-project-member] [instance: a79985f5-4be3-4b95-a3d3-339b7f25b9e9] Instance disappeared before build. {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1005.507254] env[68443]: DEBUG oslo_concurrency.lockutils [None req-36d31862-1183-414d-b4f0-a1d408c292de tempest-ServersListShow296Test-31630010 tempest-ServersListShow296Test-31630010-project-member] Lock "a79985f5-4be3-4b95-a3d3-339b7f25b9e9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 233.189s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1005.516105] env[68443]: DEBUG nova.compute.manager [None req-2598a7ae-49a0-4bde-bf45-5c28a1eeb6fe tempest-AttachInterfacesV270Test-1131883941 tempest-AttachInterfacesV270Test-1131883941-project-member] [instance: 67780abc-a216-41d2-b531-d31aef150587] Starting instance... 
{{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1005.540153] env[68443]: DEBUG nova.compute.manager [None req-2598a7ae-49a0-4bde-bf45-5c28a1eeb6fe tempest-AttachInterfacesV270Test-1131883941 tempest-AttachInterfacesV270Test-1131883941-project-member] [instance: 67780abc-a216-41d2-b531-d31aef150587] Instance disappeared before build. {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1005.561076] env[68443]: DEBUG oslo_concurrency.lockutils [None req-2598a7ae-49a0-4bde-bf45-5c28a1eeb6fe tempest-AttachInterfacesV270Test-1131883941 tempest-AttachInterfacesV270Test-1131883941-project-member] Lock "67780abc-a216-41d2-b531-d31aef150587" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 226.973s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1005.569664] env[68443]: DEBUG nova.compute.manager [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1005.626358] env[68443]: DEBUG oslo_concurrency.lockutils [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1005.626725] env[68443]: DEBUG oslo_concurrency.lockutils [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1005.628245] env[68443]: INFO nova.compute.claims [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1005.991976] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bce0022-24e4-4cb5-a309-23c8fc6cc9be {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.000789] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deab8550-cc3e-4445-b806-bdb2ad576e86 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.031273] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-692ec960-e2f8-405d-a8dd-40352d76398c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.039049] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a2f25c2-6c9d-4676-82ad-fded6c78ab53 {{(pid=68443) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.052613] env[68443]: DEBUG nova.compute.provider_tree [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1006.062027] env[68443]: DEBUG nova.scheduler.client.report [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1006.076555] env[68443]: DEBUG oslo_concurrency.lockutils [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.450s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1006.077049] env[68443]: DEBUG nova.compute.manager [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Start building networks asynchronously for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1006.107154] env[68443]: DEBUG nova.compute.utils [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Using /dev/sd instead of None {{(pid=68443) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1006.108752] env[68443]: DEBUG nova.compute.manager [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Allocating IP information in the background. {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1006.108931] env[68443]: DEBUG nova.network.neutron [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] allocate_for_instance() {{(pid=68443) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1006.121621] env[68443]: DEBUG nova.compute.manager [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Start building block device mappings for instance. 
{{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1006.195463] env[68443]: DEBUG nova.policy [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '12e52689684045b7a066b0e26dcfbaad', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2f89b76de8ed4ed3b757ff98465b51c8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68443) authorize /opt/stack/nova/nova/policy.py:203}} [ 1006.201738] env[68443]: DEBUG nova.compute.manager [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Start spawning the instance on the hypervisor. {{(pid=68443) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1006.233765] env[68443]: DEBUG nova.virt.hardware [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-08T18:48:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-08T18:48:12Z,direct_url=,disk_format='vmdk',id=a80a8b97-4d56-4702-9b02-9d115fcd6710,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f4f2a1e220914ec3b281775c224df247',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-08T18:48:13Z,virtual_size=,visibility=), allow threads: False {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1006.234020] env[68443]: DEBUG nova.virt.hardware [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Flavor limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1006.234193] env[68443]: DEBUG nova.virt.hardware [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Image limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1006.234380] env[68443]: DEBUG nova.virt.hardware [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Flavor pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1006.234529] env[68443]: DEBUG nova.virt.hardware [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Image pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1006.234677] env[68443]: DEBUG nova.virt.hardware [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1006.234882] env[68443]: DEBUG nova.virt.hardware [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1006.235053] env[68443]: DEBUG nova.virt.hardware [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1006.235227] env[68443]: DEBUG nova.virt.hardware [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Got 1 possible topologies {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1006.235393] env[68443]: DEBUG nova.virt.hardware [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1006.235565] env[68443]: DEBUG nova.virt.hardware [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1006.236738] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-311cb559-19f7-4251-949f-6217d3041c79 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.245355] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad50c36c-f317-4271-8594-73a17ff01137 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.626836] env[68443]: DEBUG nova.network.neutron [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Successfully created port: b0846a9c-46c4-44a6-9536-b6e91e5ebd64 {{(pid=68443) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1007.270873] env[68443]: DEBUG nova.network.neutron [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Successfully updated port: b0846a9c-46c4-44a6-9536-b6e91e5ebd64 {{(pid=68443) _update_port 
/opt/stack/nova/nova/network/neutron.py:586}} [ 1007.283848] env[68443]: DEBUG oslo_concurrency.lockutils [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Acquiring lock "refresh_cache-1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1007.283848] env[68443]: DEBUG oslo_concurrency.lockutils [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Acquired lock "refresh_cache-1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1007.283848] env[68443]: DEBUG nova.network.neutron [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Building network info cache for instance {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1007.291110] env[68443]: DEBUG nova.compute.manager [req-328ec439-4825-484d-8c87-4e0dc6ca44fc req-1a7b06cd-4900-4cd3-adca-16af0e208b32 service nova] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Received event network-vif-plugged-b0846a9c-46c4-44a6-9536-b6e91e5ebd64 {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1007.291110] env[68443]: DEBUG oslo_concurrency.lockutils [req-328ec439-4825-484d-8c87-4e0dc6ca44fc req-1a7b06cd-4900-4cd3-adca-16af0e208b32 service nova] Acquiring lock "1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1007.291110] env[68443]: DEBUG oslo_concurrency.lockutils [req-328ec439-4825-484d-8c87-4e0dc6ca44fc req-1a7b06cd-4900-4cd3-adca-16af0e208b32 service nova] Lock "1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1007.291110] env[68443]: DEBUG oslo_concurrency.lockutils [req-328ec439-4825-484d-8c87-4e0dc6ca44fc req-1a7b06cd-4900-4cd3-adca-16af0e208b32 service nova] Lock "1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1007.292593] env[68443]: DEBUG nova.compute.manager [req-328ec439-4825-484d-8c87-4e0dc6ca44fc req-1a7b06cd-4900-4cd3-adca-16af0e208b32 service nova] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] No waiting events found dispatching network-vif-plugged-b0846a9c-46c4-44a6-9536-b6e91e5ebd64 {{(pid=68443) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1007.294249] env[68443]: WARNING nova.compute.manager [req-328ec439-4825-484d-8c87-4e0dc6ca44fc req-1a7b06cd-4900-4cd3-adca-16af0e208b32 service nova] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Received unexpected event network-vif-plugged-b0846a9c-46c4-44a6-9536-b6e91e5ebd64 for instance with vm_state building and task_state spawning. 
[ 1007.467155] env[68443]: DEBUG nova.network.neutron [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Instance cache missing network info. {{(pid=68443) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1007.648042] env[68443]: DEBUG nova.network.neutron [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Updating instance_info_cache with network_info: [{"id": "b0846a9c-46c4-44a6-9536-b6e91e5ebd64", "address": "fa:16:3e:3a:e8:f5", "network": {"id": "5875527f-2d57-4a67-a545-9086340a6dfe", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.217", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f4f2a1e220914ec3b281775c224df247", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "089a7624-43ba-4fce-bfc0-63e4bb7f9aeb", "external-id": "nsx-vlan-transportzone-218", "segmentation_id": 218, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0846a9c-46", "ovs_interfaceid": "b0846a9c-46c4-44a6-9536-b6e91e5ebd64", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1007.663411] env[68443]: DEBUG oslo_concurrency.lockutils [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Releasing lock "refresh_cache-1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1007.663411] env[68443]: DEBUG nova.compute.manager [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Instance network_info: |[{"id": "b0846a9c-46c4-44a6-9536-b6e91e5ebd64", "address": "fa:16:3e:3a:e8:f5", "network": {"id": "5875527f-2d57-4a67-a545-9086340a6dfe", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.217", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f4f2a1e220914ec3b281775c224df247", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "089a7624-43ba-4fce-bfc0-63e4bb7f9aeb", "external-id": "nsx-vlan-transportzone-218", "segmentation_id": 218, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0846a9c-46", "ovs_interfaceid": 
"b0846a9c-46c4-44a6-9536-b6e91e5ebd64", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1007.663779] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3a:e8:f5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '089a7624-43ba-4fce-bfc0-63e4bb7f9aeb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b0846a9c-46c4-44a6-9536-b6e91e5ebd64', 'vif_model': 'vmxnet3'}] {{(pid=68443) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1007.671250] env[68443]: DEBUG oslo.service.loopingcall [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1007.671410] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Creating VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1007.671749] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-97d96a7c-ab4d-4f3a-8568-0f404093006f {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.694346] env[68443]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1007.694346] env[68443]: value = "task-3373949" [ 1007.694346] env[68443]: _type = "Task" [ 1007.694346] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.703615] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3373949, 'name': CreateVM_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.206504] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3373949, 'name': CreateVM_Task, 'duration_secs': 0.31416} completed successfully. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.207126] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Created VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1008.208052] env[68443]: DEBUG oslo_concurrency.lockutils [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1008.208369] env[68443]: DEBUG oslo_concurrency.lockutils [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1008.209135] env[68443]: DEBUG oslo_concurrency.lockutils [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1008.209436] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-39a3d042-f2c5-4b83-8dfc-f9e82a9ef6db {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.214927] env[68443]: DEBUG oslo_vmware.api [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Waiting for the task: (returnval){ [ 1008.214927] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]52e89122-cf8a-1724-43e6-12e70c363e23" [ 1008.214927] env[68443]: _type = "Task" [ 1008.214927] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.224533] env[68443]: DEBUG oslo_vmware.api [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]52e89122-cf8a-1724-43e6-12e70c363e23, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.727318] env[68443]: DEBUG oslo_concurrency.lockutils [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1008.727937] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Processing image a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1008.728279] env[68443]: DEBUG oslo_concurrency.lockutils [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1009.341505] env[68443]: DEBUG oslo_concurrency.lockutils [None req-ea7ed8a3-584e-4aa0-bfc4-3a211b38028d tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Acquiring lock "1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1009.680817] env[68443]: DEBUG nova.compute.manager [req-7891cbf0-ab9b-4d8e-85d2-b6dcde4820e4 req-77db3aec-a2ba-4326-9d75-cee11667f313 service nova] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Received event network-changed-b0846a9c-46c4-44a6-9536-b6e91e5ebd64 {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1009.681071] env[68443]: DEBUG nova.compute.manager [req-7891cbf0-ab9b-4d8e-85d2-b6dcde4820e4 req-77db3aec-a2ba-4326-9d75-cee11667f313 service nova] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Refreshing instance network info cache due to event network-changed-b0846a9c-46c4-44a6-9536-b6e91e5ebd64. 
{{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1009.681253] env[68443]: DEBUG oslo_concurrency.lockutils [req-7891cbf0-ab9b-4d8e-85d2-b6dcde4820e4 req-77db3aec-a2ba-4326-9d75-cee11667f313 service nova] Acquiring lock "refresh_cache-1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1009.681394] env[68443]: DEBUG oslo_concurrency.lockutils [req-7891cbf0-ab9b-4d8e-85d2-b6dcde4820e4 req-77db3aec-a2ba-4326-9d75-cee11667f313 service nova] Acquired lock "refresh_cache-1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1009.681554] env[68443]: DEBUG nova.network.neutron [req-7891cbf0-ab9b-4d8e-85d2-b6dcde4820e4 req-77db3aec-a2ba-4326-9d75-cee11667f313 service nova] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Refreshing network info cache for port b0846a9c-46c4-44a6-9536-b6e91e5ebd64 {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1010.005310] env[68443]: DEBUG nova.network.neutron [req-7891cbf0-ab9b-4d8e-85d2-b6dcde4820e4 req-77db3aec-a2ba-4326-9d75-cee11667f313 service nova] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Updated VIF entry in instance network info cache for port b0846a9c-46c4-44a6-9536-b6e91e5ebd64. {{(pid=68443) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1010.005643] env[68443]: DEBUG nova.network.neutron [req-7891cbf0-ab9b-4d8e-85d2-b6dcde4820e4 req-77db3aec-a2ba-4326-9d75-cee11667f313 service nova] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Updating instance_info_cache with network_info: [{"id": "b0846a9c-46c4-44a6-9536-b6e91e5ebd64", "address": "fa:16:3e:3a:e8:f5", "network": {"id": "5875527f-2d57-4a67-a545-9086340a6dfe", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.217", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f4f2a1e220914ec3b281775c224df247", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "089a7624-43ba-4fce-bfc0-63e4bb7f9aeb", "external-id": "nsx-vlan-transportzone-218", "segmentation_id": 218, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0846a9c-46", "ovs_interfaceid": "b0846a9c-46c4-44a6-9536-b6e91e5ebd64", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1010.017339] env[68443]: DEBUG oslo_concurrency.lockutils [req-7891cbf0-ab9b-4d8e-85d2-b6dcde4820e4 req-77db3aec-a2ba-4326-9d75-cee11667f313 service nova] Releasing lock "refresh_cache-1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1014.439247] env[68443]: DEBUG oslo_concurrency.lockutils [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Acquiring lock 
"bcdc4f46-810d-4ed7-84f1-2db2c318f920" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1014.439594] env[68443]: DEBUG oslo_concurrency.lockutils [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Lock "bcdc4f46-810d-4ed7-84f1-2db2c318f920" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1018.825391] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1019.824682] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1020.821128] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1021.824566] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1021.824862] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Starting heal instance info cache {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1021.824862] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Rebuilding the list of instances to heal {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1021.844531] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1021.844715] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1021.844820] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Skipping network cache update for instance because it is Building. 
{{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1021.844946] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1021.845082] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 63801b63-1601-4e77-a500-3569713177bd] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1021.845208] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 6df57929-1115-4080-8131-8960525eb833] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1021.845330] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1021.845449] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1021.845568] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1021.845727] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1021.845808] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Didn't find any instances for network info cache update. 
{{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1021.846274] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1021.846461] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1021.857721] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1021.857993] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1021.858101] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1021.858272] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68443) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1021.859597] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af876024-e2fe-4cd5-a006-53355f0688de {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.868178] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e02a458c-700a-4260-a02a-ef6529cb7096 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.884143] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d932d2e4-7e31-4e36-be40-e5726988aca2 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.890818] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b8faee6-534f-408b-9b21-eab67b5f0c84 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.921289] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180998MB free_disk=105GB free_vcpus=48 pci_devices=None {{(pid=68443) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1021.921504] env[68443]: DEBUG 
oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1021.921785] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1021.997865] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance f5aa2b1b-c290-42f2-84d3-272415184f14 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1021.998036] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 280e1cc5-91db-4a03-bca4-b2d2e4ddd221 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1021.998171] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 8fdbd88f-f608-4f26-9076-7d2f6eb67224 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1021.998318] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 08a980e1-ca8e-4af3-afbf-bd688e11259f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1021.998440] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 63801b63-1601-4e77-a500-3569713177bd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1021.998560] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 6df57929-1115-4080-8131-8960525eb833 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1021.998675] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 844f2b9d-ad2a-431a-a587-65ba446d571f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1021.998789] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance cee290e6-66e3-4d2e-a9bb-f93db33eaaaa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1021.998901] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 1c1acc0d-263d-4687-93ff-291d18a592d8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1021.999021] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1022.014334] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance aabad6b5-1bf9-44ed-8fee-60a06c46f890 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1022.026031] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 3842d98e-d971-456c-b287-53c513285acf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1022.037087] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 6e162408-6d3d-42e0-8992-f5843e9e7855 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1022.046496] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 767f1f71-6b02-4b88-83bf-dcbf9d87b895 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1022.059230] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 95bd5db1-4fff-45a2-84a0-6dd35c8463a2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1022.068717] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance ff1a64cd-c34a-4907-a40d-ddfbb28dbc7a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1022.077712] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance fd0de9a2-7a54-46be-8b6a-3415366e110c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1022.086781] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 9ad6308e-05f6-4070-bbda-500e043265f0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1022.096790] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance e8c14375-4019-4d0d-9f98-6732a55faa89 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1022.105971] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 88ce1e8e-b0d3-4c98-8efe-c0258ef1b606 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1022.115372] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 0591711b-fa55-403d-b8cc-c055b8867214 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1022.124305] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance f3b037d7-2a19-498f-b60e-7ac172411054 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1022.134173] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 1f385709-d38c-42bc-b250-e4260f7c8af8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1022.146513] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance aa464980-860e-4c6f-a732-83d75503e4c9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1022.157278] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 3ec222f1-9af7-46f8-97ff-27a8f96bd4d8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1022.167093] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 7d79fd13-4514-4fef-b953-4d85af6af40d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1022.176667] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance c7488d31-9e3c-42d5-8bdc-51919522d556 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1022.188130] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance bcdc4f46-810d-4ed7-84f1-2db2c318f920 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1022.188427] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1022.188597] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1022.520847] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eafb7f82-49f9-47b7-b02a-5dad0ac25446 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.528462] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66a02d17-3acf-4890-9e44-59ec71dccfd0 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.558063] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50e5575b-7941-466c-b1b4-a45ed38d94eb {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.565623] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c3b4c19-404b-4dc0-ac58-130292403721 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.578230] env[68443]: DEBUG nova.compute.provider_tree [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1022.587118] env[68443]: DEBUG nova.scheduler.client.report [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1022.601140] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68443) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1022.601340] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.680s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1023.580325] env[68443]: DEBUG oslo_service.periodic_task [None 
req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1023.580616] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1023.580765] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68443) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1023.825150] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1024.820550] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1053.740130] env[68443]: WARNING oslo_vmware.rw_handles [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1053.740130] env[68443]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1053.740130] env[68443]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1053.740130] env[68443]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1053.740130] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1053.740130] env[68443]: ERROR oslo_vmware.rw_handles response.begin() [ 1053.740130] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1053.740130] env[68443]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1053.740130] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1053.740130] env[68443]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1053.740130] env[68443]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1053.740130] env[68443]: ERROR oslo_vmware.rw_handles [ 1053.740772] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Downloaded image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to vmware_temp/4aee9cf5-9204-4fce-add4-9f01a3e715ec/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1053.742560] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-def29891-1b31-4e7a-a848-aad6147aa3aa 
tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Caching image {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1053.742858] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Copying Virtual Disk [datastore1] vmware_temp/4aee9cf5-9204-4fce-add4-9f01a3e715ec/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk to [datastore1] vmware_temp/4aee9cf5-9204-4fce-add4-9f01a3e715ec/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk {{(pid=68443) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1053.743085] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9dc8de02-2c04-4c73-9fe7-0aa8a4d291e4 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.752843] env[68443]: DEBUG oslo_vmware.api [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Waiting for the task: (returnval){ [ 1053.752843] env[68443]: value = "task-3373950" [ 1053.752843] env[68443]: _type = "Task" [ 1053.752843] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.761132] env[68443]: DEBUG oslo_vmware.api [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Task: {'id': task-3373950, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.263958] env[68443]: DEBUG oslo_vmware.exceptions [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Fault InvalidArgument not matched. 
{{(pid=68443) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1054.264268] env[68443]: DEBUG oslo_concurrency.lockutils [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1054.264837] env[68443]: ERROR nova.compute.manager [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1054.264837] env[68443]: Faults: ['InvalidArgument'] [ 1054.264837] env[68443]: ERROR nova.compute.manager [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Traceback (most recent call last): [ 1054.264837] env[68443]: ERROR nova.compute.manager [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1054.264837] env[68443]: ERROR nova.compute.manager [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] yield resources [ 1054.264837] env[68443]: ERROR nova.compute.manager [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1054.264837] env[68443]: ERROR nova.compute.manager [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] self.driver.spawn(context, instance, image_meta, [ 1054.264837] env[68443]: ERROR nova.compute.manager [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1054.264837] env[68443]: ERROR nova.compute.manager [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1054.264837] env[68443]: ERROR nova.compute.manager [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1054.264837] env[68443]: ERROR nova.compute.manager [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] self._fetch_image_if_missing(context, vi) [ 1054.264837] env[68443]: ERROR nova.compute.manager [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1054.265256] env[68443]: ERROR nova.compute.manager [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] image_cache(vi, tmp_image_ds_loc) [ 1054.265256] env[68443]: ERROR nova.compute.manager [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1054.265256] env[68443]: ERROR nova.compute.manager [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] vm_util.copy_virtual_disk( [ 1054.265256] env[68443]: ERROR nova.compute.manager [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1054.265256] env[68443]: ERROR nova.compute.manager [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] session._wait_for_task(vmdk_copy_task) [ 1054.265256] env[68443]: ERROR nova.compute.manager [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1054.265256] env[68443]: ERROR nova.compute.manager [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] return self.wait_for_task(task_ref) [ 1054.265256] env[68443]: ERROR nova.compute.manager [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1054.265256] env[68443]: ERROR nova.compute.manager [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] return evt.wait() [ 1054.265256] env[68443]: ERROR nova.compute.manager [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1054.265256] env[68443]: ERROR nova.compute.manager [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] result = hub.switch() [ 1054.265256] env[68443]: ERROR nova.compute.manager [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1054.265256] env[68443]: ERROR nova.compute.manager [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] return self.greenlet.switch() [ 1054.265657] env[68443]: ERROR nova.compute.manager [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1054.265657] env[68443]: ERROR nova.compute.manager [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] self.f(*self.args, **self.kw) [ 1054.265657] env[68443]: ERROR nova.compute.manager [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1054.265657] env[68443]: ERROR nova.compute.manager [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] raise exceptions.translate_fault(task_info.error) [ 1054.265657] env[68443]: ERROR nova.compute.manager [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1054.265657] env[68443]: ERROR nova.compute.manager [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Faults: ['InvalidArgument'] [ 1054.265657] env[68443]: ERROR nova.compute.manager [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] [ 1054.265657] env[68443]: INFO nova.compute.manager [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Terminating instance [ 1054.267159] env[68443]: DEBUG oslo_concurrency.lockutils [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1054.267159] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1054.267277] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-8687e9fb-5070-486e-9a9e-2076958d702a {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.269716] env[68443]: DEBUG nova.compute.manager [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1054.269904] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1054.270628] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb9146a6-0a8a-41ab-874b-62733e3837ac {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.277273] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Unregistering the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1054.277473] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-540bfa97-4585-424e-b76c-e99fcb1210f9 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.279675] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1054.279842] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68443) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1054.280908] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0bc5cc5-b242-4d5a-ba5b-e3ac4043e282 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.285866] env[68443]: DEBUG oslo_vmware.api [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Waiting for the task: (returnval){ [ 1054.285866] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]52968640-97c0-306b-1b66-f1e11b34ed1e" [ 1054.285866] env[68443]: _type = "Task" [ 1054.285866] env[68443]: } to complete. 
{{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.296175] env[68443]: DEBUG oslo_vmware.api [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]52968640-97c0-306b-1b66-f1e11b34ed1e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.353150] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Unregistered the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1054.353381] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Deleting contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1054.353558] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Deleting the datastore file [datastore1] f5aa2b1b-c290-42f2-84d3-272415184f14 {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1054.353838] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-591d452b-1d37-4a44-85f6-72734b34bae6 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.359505] env[68443]: DEBUG oslo_vmware.api [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Waiting for the task: (returnval){ [ 1054.359505] env[68443]: value = "task-3373952" [ 1054.359505] env[68443]: _type = "Task" [ 1054.359505] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.367045] env[68443]: DEBUG oslo_vmware.api [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Task: {'id': task-3373952, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.796258] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Preparing fetch location {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1054.796556] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Creating directory with path [datastore1] vmware_temp/777edac7-afca-4d57-abbe-65a9b3c18d4d/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1054.796738] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-76301ec5-5887-448c-b12e-f22d60a7ca36 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.808148] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Created directory with path [datastore1] vmware_temp/777edac7-afca-4d57-abbe-65a9b3c18d4d/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1054.808376] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Fetch image to [datastore1] vmware_temp/777edac7-afca-4d57-abbe-65a9b3c18d4d/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1054.808583] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to [datastore1] vmware_temp/777edac7-afca-4d57-abbe-65a9b3c18d4d/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1054.809343] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54deb6da-58a7-4566-9a66-aa9105a18a82 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.817007] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-219a5265-489c-4eeb-a859-6282a908859f {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.826639] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41285a62-bc36-4f05-bef9-e5d37e9dafc9 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.857341] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-2f90f27c-e04d-4c71-b4a4-dfdde0460479 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.866068] env[68443]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-908f78dc-0103-47fc-966f-4663b2e9a5be {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.873343] env[68443]: DEBUG oslo_vmware.api [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Task: {'id': task-3373952, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.083579} completed successfully. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.874213] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Deleted the datastore file {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1054.874501] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Deleted contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1054.874827] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1054.875240] env[68443]: INFO nova.compute.manager [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Took 0.61 seconds to destroy the instance on the hypervisor. 
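Annotation (not part of the captured log): the "Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {...}" records in this section carry the totals, reservations and allocation ratios the resource tracker reports. As a point of reference only, the plain-Python sketch below shows how schedulable capacity is conventionally derived from such a record, i.e. (total - reserved) * allocation_ratio; the capacity() helper is an illustrative name, not Nova or Placement code.

    # Illustrative sketch, not Nova/Placement source: derive schedulable
    # capacity from an inventory record shaped like the ones logged above.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }

    def capacity(resource_class):
        inv = inventory[resource_class]
        return int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])

    for rc in sorted(inventory):
        print(rc, capacity(rc))
    # DISK_GB 400, MEMORY_MB 196078, VCPU 192 -- consistent with the logged
    # "Total usable vcpus: 48, total allocated vcpus: 10" leaving ample headroom.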
[ 1054.878098] env[68443]: DEBUG nova.compute.claims [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Aborting claim: {{(pid=68443) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1054.878329] env[68443]: DEBUG oslo_concurrency.lockutils [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1054.878713] env[68443]: DEBUG oslo_concurrency.lockutils [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1054.898512] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1054.956898] env[68443]: DEBUG oslo_vmware.rw_handles [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/777edac7-afca-4d57-abbe-65a9b3c18d4d/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68443) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1055.023516] env[68443]: DEBUG oslo_vmware.rw_handles [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Completed reading data from the image iterator. {{(pid=68443) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1055.023714] env[68443]: DEBUG oslo_vmware.rw_handles [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/777edac7-afca-4d57-abbe-65a9b3c18d4d/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68443) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1055.320761] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0120841-dd36-4ecf-b5e4-1429b219936b {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.328764] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0947461b-1f9a-4c11-9635-6ec99bf29020 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.363262] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2e8ba8c-d288-48d3-9c8d-d26a36216455 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.371748] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-510a4aa7-b7a2-457c-bbed-ee467c68abfd {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.384845] env[68443]: DEBUG nova.compute.provider_tree [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1055.393888] env[68443]: DEBUG nova.scheduler.client.report [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1055.407419] env[68443]: DEBUG oslo_concurrency.lockutils [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.529s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1055.408073] env[68443]: ERROR nova.compute.manager [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1055.408073] env[68443]: Faults: ['InvalidArgument'] [ 1055.408073] env[68443]: ERROR nova.compute.manager [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Traceback (most recent call last): [ 1055.408073] env[68443]: ERROR nova.compute.manager [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in 
_build_and_run_instance [ 1055.408073] env[68443]: ERROR nova.compute.manager [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] self.driver.spawn(context, instance, image_meta, [ 1055.408073] env[68443]: ERROR nova.compute.manager [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1055.408073] env[68443]: ERROR nova.compute.manager [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1055.408073] env[68443]: ERROR nova.compute.manager [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1055.408073] env[68443]: ERROR nova.compute.manager [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] self._fetch_image_if_missing(context, vi) [ 1055.408073] env[68443]: ERROR nova.compute.manager [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1055.408073] env[68443]: ERROR nova.compute.manager [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] image_cache(vi, tmp_image_ds_loc) [ 1055.408073] env[68443]: ERROR nova.compute.manager [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1055.408492] env[68443]: ERROR nova.compute.manager [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] vm_util.copy_virtual_disk( [ 1055.408492] env[68443]: ERROR nova.compute.manager [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1055.408492] env[68443]: ERROR nova.compute.manager [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] session._wait_for_task(vmdk_copy_task) [ 1055.408492] env[68443]: ERROR nova.compute.manager [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1055.408492] env[68443]: ERROR nova.compute.manager [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] return self.wait_for_task(task_ref) [ 1055.408492] env[68443]: ERROR nova.compute.manager [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1055.408492] env[68443]: ERROR nova.compute.manager [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] return evt.wait() [ 1055.408492] env[68443]: ERROR nova.compute.manager [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1055.408492] env[68443]: ERROR nova.compute.manager [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] result = hub.switch() [ 1055.408492] env[68443]: ERROR nova.compute.manager [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1055.408492] env[68443]: ERROR nova.compute.manager [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] return self.greenlet.switch() [ 1055.408492] env[68443]: ERROR nova.compute.manager [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1055.408492] env[68443]: ERROR nova.compute.manager [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] self.f(*self.args, **self.kw) [ 1055.408871] env[68443]: ERROR nova.compute.manager [instance: 
f5aa2b1b-c290-42f2-84d3-272415184f14] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1055.408871] env[68443]: ERROR nova.compute.manager [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] raise exceptions.translate_fault(task_info.error) [ 1055.408871] env[68443]: ERROR nova.compute.manager [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1055.408871] env[68443]: ERROR nova.compute.manager [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Faults: ['InvalidArgument'] [ 1055.408871] env[68443]: ERROR nova.compute.manager [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] [ 1055.408871] env[68443]: DEBUG nova.compute.utils [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] VimFaultException {{(pid=68443) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1055.410306] env[68443]: DEBUG nova.compute.manager [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Build of instance f5aa2b1b-c290-42f2-84d3-272415184f14 was re-scheduled: A specified parameter was not correct: fileType [ 1055.410306] env[68443]: Faults: ['InvalidArgument'] {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1055.410729] env[68443]: DEBUG nova.compute.manager [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Unplugging VIFs for instance {{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1055.410921] env[68443]: DEBUG nova.compute.manager [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1055.411143] env[68443]: DEBUG nova.compute.manager [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1055.411321] env[68443]: DEBUG nova.network.neutron [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1055.726422] env[68443]: DEBUG nova.network.neutron [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1055.744347] env[68443]: INFO nova.compute.manager [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Took 0.33 seconds to deallocate network for instance. [ 1055.867968] env[68443]: INFO nova.scheduler.client.report [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Deleted allocations for instance f5aa2b1b-c290-42f2-84d3-272415184f14 [ 1055.900150] env[68443]: DEBUG oslo_concurrency.lockutils [None req-def29891-1b31-4e7a-a848-aad6147aa3aa tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Lock "f5aa2b1b-c290-42f2-84d3-272415184f14" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 331.667s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1055.901362] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4d56033b-9784-4da4-b993-5454555a5ea0 tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Lock "f5aa2b1b-c290-42f2-84d3-272415184f14" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 132.182s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1055.901751] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4d56033b-9784-4da4-b993-5454555a5ea0 tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Acquiring lock "f5aa2b1b-c290-42f2-84d3-272415184f14-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1055.902187] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4d56033b-9784-4da4-b993-5454555a5ea0 tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Lock 
"f5aa2b1b-c290-42f2-84d3-272415184f14-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1055.902398] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4d56033b-9784-4da4-b993-5454555a5ea0 tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Lock "f5aa2b1b-c290-42f2-84d3-272415184f14-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1055.905055] env[68443]: INFO nova.compute.manager [None req-4d56033b-9784-4da4-b993-5454555a5ea0 tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Terminating instance [ 1055.906938] env[68443]: DEBUG nova.compute.manager [None req-4d56033b-9784-4da4-b993-5454555a5ea0 tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1055.907197] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-4d56033b-9784-4da4-b993-5454555a5ea0 tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1055.907782] env[68443]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5e35ff6e-bab1-40f5-ab6f-e23603dabfc8 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.918261] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75cf50fe-6d71-4d45-9b70-aa7456a373aa {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.930400] env[68443]: DEBUG nova.compute.manager [None req-4371b7a8-64b0-4bae-af4b-23156b161e8e tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] [instance: aabad6b5-1bf9-44ed-8fee-60a06c46f890] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1055.953122] env[68443]: WARNING nova.virt.vmwareapi.vmops [None req-4d56033b-9784-4da4-b993-5454555a5ea0 tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f5aa2b1b-c290-42f2-84d3-272415184f14 could not be found. 
[ 1055.953474] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-4d56033b-9784-4da4-b993-5454555a5ea0 tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1055.953689] env[68443]: INFO nova.compute.manager [None req-4d56033b-9784-4da4-b993-5454555a5ea0 tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1055.953964] env[68443]: DEBUG oslo.service.loopingcall [None req-4d56033b-9784-4da4-b993-5454555a5ea0 tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1055.954200] env[68443]: DEBUG nova.compute.manager [-] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1055.954405] env[68443]: DEBUG nova.network.neutron [-] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1055.956945] env[68443]: DEBUG nova.compute.manager [None req-4371b7a8-64b0-4bae-af4b-23156b161e8e tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] [instance: aabad6b5-1bf9-44ed-8fee-60a06c46f890] Instance disappeared before build. {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1055.982734] env[68443]: DEBUG nova.network.neutron [-] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1055.991355] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4371b7a8-64b0-4bae-af4b-23156b161e8e tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Lock "aabad6b5-1bf9-44ed-8fee-60a06c46f890" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 197.907s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1055.994496] env[68443]: INFO nova.compute.manager [-] [instance: f5aa2b1b-c290-42f2-84d3-272415184f14] Took 0.04 seconds to deallocate network for instance. [ 1056.002814] env[68443]: DEBUG nova.compute.manager [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 3842d98e-d971-456c-b287-53c513285acf] Starting instance... 
{{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1056.061459] env[68443]: DEBUG oslo_concurrency.lockutils [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1056.061459] env[68443]: DEBUG oslo_concurrency.lockutils [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1056.062741] env[68443]: INFO nova.compute.claims [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 3842d98e-d971-456c-b287-53c513285acf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1056.120870] env[68443]: DEBUG oslo_concurrency.lockutils [None req-cf799ce0-fb2d-4254-8183-19dba0e13900 tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Acquiring lock "44aa2211-e364-46c6-9cad-a53f5563808b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1056.121596] env[68443]: DEBUG oslo_concurrency.lockutils [None req-cf799ce0-fb2d-4254-8183-19dba0e13900 tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Lock "44aa2211-e364-46c6-9cad-a53f5563808b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1056.127671] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4d56033b-9784-4da4-b993-5454555a5ea0 tempest-ServersV294TestFqdnHostnames-1104742354 tempest-ServersV294TestFqdnHostnames-1104742354-project-member] Lock "f5aa2b1b-c290-42f2-84d3-272415184f14" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.226s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1056.463166] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-413a864f-815b-4d37-ac5c-03d0e4095606 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.470992] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3aa064f-a513-4b97-b848-3a4e87ea2f5e {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.500424] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7534e5bf-7348-41a6-9cbd-ed97d37b5f96 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.507780] env[68443]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00520405-3c03-4da9-b5bf-89902b9f1abb {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.522061] env[68443]: DEBUG nova.compute.provider_tree [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1056.530670] env[68443]: DEBUG nova.scheduler.client.report [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1056.545538] env[68443]: DEBUG oslo_concurrency.lockutils [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.484s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1056.546222] env[68443]: DEBUG nova.compute.manager [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 3842d98e-d971-456c-b287-53c513285acf] Start building networks asynchronously for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1056.585098] env[68443]: DEBUG nova.compute.utils [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Using /dev/sd instead of None {{(pid=68443) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1056.586470] env[68443]: DEBUG nova.compute.manager [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 3842d98e-d971-456c-b287-53c513285acf] Allocating IP information in the background. {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1056.586839] env[68443]: DEBUG nova.network.neutron [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 3842d98e-d971-456c-b287-53c513285acf] allocate_for_instance() {{(pid=68443) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1056.597159] env[68443]: DEBUG nova.compute.manager [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 3842d98e-d971-456c-b287-53c513285acf] Start building block device mappings for instance. 
{{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1056.657999] env[68443]: DEBUG nova.policy [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bd97a934ab8f48e2bf883cc4dddcdde1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3dafe4b3f7d243caa51d39bfc74a4c11', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68443) authorize /opt/stack/nova/nova/policy.py:203}} [ 1056.662975] env[68443]: DEBUG nova.compute.manager [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 3842d98e-d971-456c-b287-53c513285acf] Start spawning the instance on the hypervisor. {{(pid=68443) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1056.698360] env[68443]: DEBUG nova.virt.hardware [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-08T18:48:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-08T18:48:12Z,direct_url=,disk_format='vmdk',id=a80a8b97-4d56-4702-9b02-9d115fcd6710,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f4f2a1e220914ec3b281775c224df247',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-08T18:48:13Z,virtual_size=,visibility=), allow threads: False {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1056.698778] env[68443]: DEBUG nova.virt.hardware [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Flavor limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1056.699074] env[68443]: DEBUG nova.virt.hardware [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Image limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1056.699368] env[68443]: DEBUG nova.virt.hardware [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Flavor pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1056.699623] env[68443]: DEBUG nova.virt.hardware [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Image pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1056.703019] env[68443]: DEBUG nova.virt.hardware [None 
req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1056.703019] env[68443]: DEBUG nova.virt.hardware [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1056.703019] env[68443]: DEBUG nova.virt.hardware [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1056.703019] env[68443]: DEBUG nova.virt.hardware [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Got 1 possible topologies {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1056.703019] env[68443]: DEBUG nova.virt.hardware [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1056.703281] env[68443]: DEBUG nova.virt.hardware [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1056.703281] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bece76d-e1dc-4336-b65f-196366568ad7 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.711617] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bc242d6-f7b6-4a68-97fc-a813ab93fcf7 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.961767] env[68443]: DEBUG nova.network.neutron [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 3842d98e-d971-456c-b287-53c513285acf] Successfully created port: d5828c7c-e550-4c02-98c8-d9ac02a48ed3 {{(pid=68443) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1057.978321] env[68443]: DEBUG nova.network.neutron [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 3842d98e-d971-456c-b287-53c513285acf] Successfully updated port: d5828c7c-e550-4c02-98c8-d9ac02a48ed3 {{(pid=68443) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1057.989215] env[68443]: DEBUG oslo_concurrency.lockutils [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 
tempest-DeleteServersTestJSON-507336290-project-member] Acquiring lock "refresh_cache-3842d98e-d971-456c-b287-53c513285acf" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1057.989696] env[68443]: DEBUG oslo_concurrency.lockutils [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Acquired lock "refresh_cache-3842d98e-d971-456c-b287-53c513285acf" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1057.989882] env[68443]: DEBUG nova.network.neutron [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 3842d98e-d971-456c-b287-53c513285acf] Building network info cache for instance {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1058.056746] env[68443]: DEBUG nova.network.neutron [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 3842d98e-d971-456c-b287-53c513285acf] Instance cache missing network info. {{(pid=68443) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1058.258379] env[68443]: DEBUG nova.compute.manager [req-7b55c3a1-5f87-4d3e-bf29-714f7be0325c req-183cdb1b-4b41-4ad7-be9f-e7df2a45809f service nova] [instance: 3842d98e-d971-456c-b287-53c513285acf] Received event network-vif-plugged-d5828c7c-e550-4c02-98c8-d9ac02a48ed3 {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1058.258597] env[68443]: DEBUG oslo_concurrency.lockutils [req-7b55c3a1-5f87-4d3e-bf29-714f7be0325c req-183cdb1b-4b41-4ad7-be9f-e7df2a45809f service nova] Acquiring lock "3842d98e-d971-456c-b287-53c513285acf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1058.258804] env[68443]: DEBUG oslo_concurrency.lockutils [req-7b55c3a1-5f87-4d3e-bf29-714f7be0325c req-183cdb1b-4b41-4ad7-be9f-e7df2a45809f service nova] Lock "3842d98e-d971-456c-b287-53c513285acf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1058.258968] env[68443]: DEBUG oslo_concurrency.lockutils [req-7b55c3a1-5f87-4d3e-bf29-714f7be0325c req-183cdb1b-4b41-4ad7-be9f-e7df2a45809f service nova] Lock "3842d98e-d971-456c-b287-53c513285acf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1058.259150] env[68443]: DEBUG nova.compute.manager [req-7b55c3a1-5f87-4d3e-bf29-714f7be0325c req-183cdb1b-4b41-4ad7-be9f-e7df2a45809f service nova] [instance: 3842d98e-d971-456c-b287-53c513285acf] No waiting events found dispatching network-vif-plugged-d5828c7c-e550-4c02-98c8-d9ac02a48ed3 {{(pid=68443) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1058.259340] env[68443]: WARNING nova.compute.manager [req-7b55c3a1-5f87-4d3e-bf29-714f7be0325c req-183cdb1b-4b41-4ad7-be9f-e7df2a45809f service nova] [instance: 3842d98e-d971-456c-b287-53c513285acf] Received unexpected event 
network-vif-plugged-d5828c7c-e550-4c02-98c8-d9ac02a48ed3 for instance with vm_state building and task_state spawning. [ 1058.259509] env[68443]: DEBUG nova.compute.manager [req-7b55c3a1-5f87-4d3e-bf29-714f7be0325c req-183cdb1b-4b41-4ad7-be9f-e7df2a45809f service nova] [instance: 3842d98e-d971-456c-b287-53c513285acf] Received event network-changed-d5828c7c-e550-4c02-98c8-d9ac02a48ed3 {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1058.259664] env[68443]: DEBUG nova.compute.manager [req-7b55c3a1-5f87-4d3e-bf29-714f7be0325c req-183cdb1b-4b41-4ad7-be9f-e7df2a45809f service nova] [instance: 3842d98e-d971-456c-b287-53c513285acf] Refreshing instance network info cache due to event network-changed-d5828c7c-e550-4c02-98c8-d9ac02a48ed3. {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1058.259824] env[68443]: DEBUG oslo_concurrency.lockutils [req-7b55c3a1-5f87-4d3e-bf29-714f7be0325c req-183cdb1b-4b41-4ad7-be9f-e7df2a45809f service nova] Acquiring lock "refresh_cache-3842d98e-d971-456c-b287-53c513285acf" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1058.331427] env[68443]: DEBUG nova.network.neutron [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 3842d98e-d971-456c-b287-53c513285acf] Updating instance_info_cache with network_info: [{"id": "d5828c7c-e550-4c02-98c8-d9ac02a48ed3", "address": "fa:16:3e:08:44:66", "network": {"id": "03d4bfbc-f6ba-4fd1-8bf5-dde287b9760f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1378866790-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3dafe4b3f7d243caa51d39bfc74a4c11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "47ca1ce6-8148-48d5-bcfe-89e39b73914e", "external-id": "nsx-vlan-transportzone-259", "segmentation_id": 259, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd5828c7c-e5", "ovs_interfaceid": "d5828c7c-e550-4c02-98c8-d9ac02a48ed3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1058.349810] env[68443]: DEBUG oslo_concurrency.lockutils [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Releasing lock "refresh_cache-3842d98e-d971-456c-b287-53c513285acf" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1058.349920] env[68443]: DEBUG nova.compute.manager [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 3842d98e-d971-456c-b287-53c513285acf] Instance network_info: |[{"id": "d5828c7c-e550-4c02-98c8-d9ac02a48ed3", "address": "fa:16:3e:08:44:66", "network": {"id": "03d4bfbc-f6ba-4fd1-8bf5-dde287b9760f", 
"bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1378866790-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3dafe4b3f7d243caa51d39bfc74a4c11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "47ca1ce6-8148-48d5-bcfe-89e39b73914e", "external-id": "nsx-vlan-transportzone-259", "segmentation_id": 259, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd5828c7c-e5", "ovs_interfaceid": "d5828c7c-e550-4c02-98c8-d9ac02a48ed3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1058.350221] env[68443]: DEBUG oslo_concurrency.lockutils [req-7b55c3a1-5f87-4d3e-bf29-714f7be0325c req-183cdb1b-4b41-4ad7-be9f-e7df2a45809f service nova] Acquired lock "refresh_cache-3842d98e-d971-456c-b287-53c513285acf" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1058.350396] env[68443]: DEBUG nova.network.neutron [req-7b55c3a1-5f87-4d3e-bf29-714f7be0325c req-183cdb1b-4b41-4ad7-be9f-e7df2a45809f service nova] [instance: 3842d98e-d971-456c-b287-53c513285acf] Refreshing network info cache for port d5828c7c-e550-4c02-98c8-d9ac02a48ed3 {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1058.351553] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 3842d98e-d971-456c-b287-53c513285acf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:08:44:66', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '47ca1ce6-8148-48d5-bcfe-89e39b73914e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd5828c7c-e550-4c02-98c8-d9ac02a48ed3', 'vif_model': 'vmxnet3'}] {{(pid=68443) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1058.360565] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Creating folder: Project (3dafe4b3f7d243caa51d39bfc74a4c11). Parent ref: group-v673136. {{(pid=68443) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1058.361373] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-72dff544-bbf4-4100-8876-8a9694fdd264 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.377207] env[68443]: INFO nova.virt.vmwareapi.vm_util [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Created folder: Project (3dafe4b3f7d243caa51d39bfc74a4c11) in parent group-v673136. 
[ 1058.377207] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Creating folder: Instances. Parent ref: group-v673187. {{(pid=68443) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1058.377207] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3909594d-1fd2-47d3-8dd1-1584b7dbaf4b {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.389353] env[68443]: INFO nova.virt.vmwareapi.vm_util [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Created folder: Instances in parent group-v673187. [ 1058.389648] env[68443]: DEBUG oslo.service.loopingcall [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1058.389838] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3842d98e-d971-456c-b287-53c513285acf] Creating VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1058.390250] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-73d2a29d-e669-47c6-bf1f-e51d3db40092 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.412980] env[68443]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1058.412980] env[68443]: value = "task-3373955" [ 1058.412980] env[68443]: _type = "Task" [ 1058.412980] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.424625] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3373955, 'name': CreateVM_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.802336] env[68443]: DEBUG nova.network.neutron [req-7b55c3a1-5f87-4d3e-bf29-714f7be0325c req-183cdb1b-4b41-4ad7-be9f-e7df2a45809f service nova] [instance: 3842d98e-d971-456c-b287-53c513285acf] Updated VIF entry in instance network info cache for port d5828c7c-e550-4c02-98c8-d9ac02a48ed3. 
{{(pid=68443) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1058.802913] env[68443]: DEBUG nova.network.neutron [req-7b55c3a1-5f87-4d3e-bf29-714f7be0325c req-183cdb1b-4b41-4ad7-be9f-e7df2a45809f service nova] [instance: 3842d98e-d971-456c-b287-53c513285acf] Updating instance_info_cache with network_info: [{"id": "d5828c7c-e550-4c02-98c8-d9ac02a48ed3", "address": "fa:16:3e:08:44:66", "network": {"id": "03d4bfbc-f6ba-4fd1-8bf5-dde287b9760f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1378866790-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3dafe4b3f7d243caa51d39bfc74a4c11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "47ca1ce6-8148-48d5-bcfe-89e39b73914e", "external-id": "nsx-vlan-transportzone-259", "segmentation_id": 259, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd5828c7c-e5", "ovs_interfaceid": "d5828c7c-e550-4c02-98c8-d9ac02a48ed3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1058.813477] env[68443]: DEBUG oslo_concurrency.lockutils [req-7b55c3a1-5f87-4d3e-bf29-714f7be0325c req-183cdb1b-4b41-4ad7-be9f-e7df2a45809f service nova] Releasing lock "refresh_cache-3842d98e-d971-456c-b287-53c513285acf" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1058.926022] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3373955, 'name': CreateVM_Task, 'duration_secs': 0.304107} completed successfully. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.926022] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3842d98e-d971-456c-b287-53c513285acf] Created VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1058.926022] env[68443]: DEBUG oslo_concurrency.lockutils [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1058.926022] env[68443]: DEBUG oslo_concurrency.lockutils [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1058.926022] env[68443]: DEBUG oslo_concurrency.lockutils [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1058.926331] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef214d2a-d2d7-41aa-b2f3-14bc6aeaf499 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.930037] env[68443]: DEBUG oslo_vmware.api [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Waiting for the task: (returnval){ [ 1058.930037] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]52168176-464e-f7bc-6e47-519f72e41860" [ 1058.930037] env[68443]: _type = "Task" [ 1058.930037] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.938290] env[68443]: DEBUG oslo_vmware.api [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]52168176-464e-f7bc-6e47-519f72e41860, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.441029] env[68443]: DEBUG oslo_concurrency.lockutils [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1059.441324] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 3842d98e-d971-456c-b287-53c513285acf] Processing image a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1059.441454] env[68443]: DEBUG oslo_concurrency.lockutils [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1064.138084] env[68443]: DEBUG oslo_concurrency.lockutils [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Acquiring lock "91fd9c10-db96-4366-9548-13b36f94db6b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1064.138401] env[68443]: DEBUG oslo_concurrency.lockutils [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Lock "91fd9c10-db96-4366-9548-13b36f94db6b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1078.825925] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1080.825754] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1082.825374] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1082.825718] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Starting heal instance info cache {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1082.825805] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Rebuilding the list of instances to heal {{(pid=68443) 
_heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1082.847540] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1082.847695] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1082.847811] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1082.847935] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 63801b63-1601-4e77-a500-3569713177bd] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1082.848070] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 6df57929-1115-4080-8131-8960525eb833] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1082.848195] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1082.848316] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1082.848437] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1082.848556] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1082.848672] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 3842d98e-d971-456c-b287-53c513285acf] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1082.848805] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Didn't find any instances for network info cache update. 
{{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1082.849273] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1082.849459] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1082.878966] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1082.879215] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1082.879385] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1082.879542] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68443) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1082.880660] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82deb9d3-47b4-4b1f-aa3e-6398b169b763 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.889342] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a37de204-0030-4212-aebf-f626f157ee34 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.902870] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1e8c235-b77c-4558-a8d7-c608a6f157e2 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.908942] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a7b2eb4-abe5-46ed-bd3c-632ba1ebbe98 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.939948] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180952MB free_disk=105GB free_vcpus=48 pci_devices=None {{(pid=68443) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1082.940114] env[68443]: DEBUG 
oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1082.940315] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1083.018353] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 280e1cc5-91db-4a03-bca4-b2d2e4ddd221 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1083.018353] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 8fdbd88f-f608-4f26-9076-7d2f6eb67224 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1083.018353] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 08a980e1-ca8e-4af3-afbf-bd688e11259f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1083.018353] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 63801b63-1601-4e77-a500-3569713177bd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1083.018610] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 6df57929-1115-4080-8131-8960525eb833 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1083.018610] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 844f2b9d-ad2a-431a-a587-65ba446d571f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1083.018610] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance cee290e6-66e3-4d2e-a9bb-f93db33eaaaa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1083.018610] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 1c1acc0d-263d-4687-93ff-291d18a592d8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1083.018767] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1083.018767] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 3842d98e-d971-456c-b287-53c513285acf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1083.029148] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 6e162408-6d3d-42e0-8992-f5843e9e7855 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1083.039533] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 767f1f71-6b02-4b88-83bf-dcbf9d87b895 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1083.049122] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 95bd5db1-4fff-45a2-84a0-6dd35c8463a2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1083.058943] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance ff1a64cd-c34a-4907-a40d-ddfbb28dbc7a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1083.068332] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance fd0de9a2-7a54-46be-8b6a-3415366e110c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1083.078168] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 9ad6308e-05f6-4070-bbda-500e043265f0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1083.088924] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance e8c14375-4019-4d0d-9f98-6732a55faa89 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1083.098277] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 88ce1e8e-b0d3-4c98-8efe-c0258ef1b606 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1083.110037] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 0591711b-fa55-403d-b8cc-c055b8867214 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1083.120712] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance f3b037d7-2a19-498f-b60e-7ac172411054 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1083.131167] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 1f385709-d38c-42bc-b250-e4260f7c8af8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1083.139270] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance aa464980-860e-4c6f-a732-83d75503e4c9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1083.148506] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 3ec222f1-9af7-46f8-97ff-27a8f96bd4d8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1083.158661] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 7d79fd13-4514-4fef-b953-4d85af6af40d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1083.168702] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance c7488d31-9e3c-42d5-8bdc-51919522d556 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1083.178012] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance bcdc4f46-810d-4ed7-84f1-2db2c318f920 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1083.187668] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 44aa2211-e364-46c6-9cad-a53f5563808b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1083.196291] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 91fd9c10-db96-4366-9548-13b36f94db6b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1083.196531] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1083.196682] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1083.548418] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ba4cdf2-0d51-484a-882e-0466390b3baa {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.556205] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-455dd5bd-20a5-482e-8c70-e6987e1b523f {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.586031] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40a3430d-a120-4d8b-878a-6564e5a2fda2 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.593160] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feab96ab-ba00-4ea6-b510-bb3abbcaec74 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.605907] env[68443]: DEBUG nova.compute.provider_tree [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1083.614640] env[68443]: DEBUG nova.scheduler.client.report [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1083.629141] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68443) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1083.629345] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.689s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1085.605665] env[68443]: DEBUG oslo_service.periodic_task [None 
req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1085.606000] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1085.606071] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68443) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1085.825454] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1086.820762] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1103.752721] env[68443]: WARNING oslo_vmware.rw_handles [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1103.752721] env[68443]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1103.752721] env[68443]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1103.752721] env[68443]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1103.752721] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1103.752721] env[68443]: ERROR oslo_vmware.rw_handles response.begin() [ 1103.752721] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1103.752721] env[68443]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1103.752721] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1103.752721] env[68443]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1103.752721] env[68443]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1103.752721] env[68443]: ERROR oslo_vmware.rw_handles [ 1103.753255] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Downloaded image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to vmware_temp/777edac7-afca-4d57-abbe-65a9b3c18d4d/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1103.755840] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-52f8da29-281a-4a79-856e-531542bb611b 
tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Caching image {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1103.756334] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Copying Virtual Disk [datastore1] vmware_temp/777edac7-afca-4d57-abbe-65a9b3c18d4d/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk to [datastore1] vmware_temp/777edac7-afca-4d57-abbe-65a9b3c18d4d/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk {{(pid=68443) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1103.756706] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-22c0fc37-b539-4f5e-828f-94a9eaad102c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.764417] env[68443]: DEBUG oslo_vmware.api [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Waiting for the task: (returnval){ [ 1103.764417] env[68443]: value = "task-3373956" [ 1103.764417] env[68443]: _type = "Task" [ 1103.764417] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.772607] env[68443]: DEBUG oslo_vmware.api [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Task: {'id': task-3373956, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.274823] env[68443]: DEBUG oslo_vmware.exceptions [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Fault InvalidArgument not matched. 
{{(pid=68443) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1104.275127] env[68443]: DEBUG oslo_concurrency.lockutils [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1104.275676] env[68443]: ERROR nova.compute.manager [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1104.275676] env[68443]: Faults: ['InvalidArgument'] [ 1104.275676] env[68443]: ERROR nova.compute.manager [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Traceback (most recent call last): [ 1104.275676] env[68443]: ERROR nova.compute.manager [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1104.275676] env[68443]: ERROR nova.compute.manager [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] yield resources [ 1104.275676] env[68443]: ERROR nova.compute.manager [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1104.275676] env[68443]: ERROR nova.compute.manager [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] self.driver.spawn(context, instance, image_meta, [ 1104.275676] env[68443]: ERROR nova.compute.manager [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1104.275676] env[68443]: ERROR nova.compute.manager [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1104.275676] env[68443]: ERROR nova.compute.manager [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1104.275676] env[68443]: ERROR nova.compute.manager [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] self._fetch_image_if_missing(context, vi) [ 1104.275676] env[68443]: ERROR nova.compute.manager [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1104.276101] env[68443]: ERROR nova.compute.manager [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] image_cache(vi, tmp_image_ds_loc) [ 1104.276101] env[68443]: ERROR nova.compute.manager [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1104.276101] env[68443]: ERROR nova.compute.manager [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] vm_util.copy_virtual_disk( [ 1104.276101] env[68443]: ERROR nova.compute.manager [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1104.276101] env[68443]: ERROR nova.compute.manager [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] session._wait_for_task(vmdk_copy_task) [ 1104.276101] env[68443]: ERROR nova.compute.manager [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1104.276101] env[68443]: ERROR nova.compute.manager [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] return self.wait_for_task(task_ref) [ 1104.276101] env[68443]: ERROR nova.compute.manager [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1104.276101] env[68443]: ERROR nova.compute.manager [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] return evt.wait() [ 1104.276101] env[68443]: ERROR nova.compute.manager [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1104.276101] env[68443]: ERROR nova.compute.manager [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] result = hub.switch() [ 1104.276101] env[68443]: ERROR nova.compute.manager [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1104.276101] env[68443]: ERROR nova.compute.manager [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] return self.greenlet.switch() [ 1104.276359] env[68443]: ERROR nova.compute.manager [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1104.276359] env[68443]: ERROR nova.compute.manager [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] self.f(*self.args, **self.kw) [ 1104.276359] env[68443]: ERROR nova.compute.manager [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1104.276359] env[68443]: ERROR nova.compute.manager [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] raise exceptions.translate_fault(task_info.error) [ 1104.276359] env[68443]: ERROR nova.compute.manager [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1104.276359] env[68443]: ERROR nova.compute.manager [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Faults: ['InvalidArgument'] [ 1104.276359] env[68443]: ERROR nova.compute.manager [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] [ 1104.276359] env[68443]: INFO nova.compute.manager [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Terminating instance [ 1104.277541] env[68443]: DEBUG oslo_concurrency.lockutils [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1104.277782] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1104.278037] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-2b65bbc3-5c34-45af-a1a0-131d14bf1ceb {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.281519] env[68443]: DEBUG nova.compute.manager [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1104.281742] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1104.282497] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9b6b561-5d54-4d0a-a086-35bda75e8396 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.286187] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1104.286364] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68443) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1104.287552] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c0d1c5fe-75aa-4b8f-995e-0c5efd821435 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.291469] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Unregistering the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1104.291990] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8b37d1d3-5a07-401b-82e4-0477b8bc8247 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.294320] env[68443]: DEBUG oslo_vmware.api [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Waiting for the task: (returnval){ [ 1104.294320] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]522e922d-2df9-b7e8-f8ed-cda189ce71cc" [ 1104.294320] env[68443]: _type = "Task" [ 1104.294320] env[68443]: } to complete. 
{{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.303413] env[68443]: DEBUG oslo_vmware.api [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]522e922d-2df9-b7e8-f8ed-cda189ce71cc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.364291] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Unregistered the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1104.364591] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Deleting contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1104.364788] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Deleting the datastore file [datastore1] 280e1cc5-91db-4a03-bca4-b2d2e4ddd221 {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1104.365194] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7c8756dd-c61b-4816-a4f4-46ab0dc1b2e9 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.373169] env[68443]: DEBUG oslo_vmware.api [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Waiting for the task: (returnval){ [ 1104.373169] env[68443]: value = "task-3373958" [ 1104.373169] env[68443]: _type = "Task" [ 1104.373169] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.380607] env[68443]: DEBUG oslo_vmware.api [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Task: {'id': task-3373958, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.804980] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Preparing fetch location {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1104.805258] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Creating directory with path [datastore1] vmware_temp/98b667b1-9af8-4c6e-83c6-be0b32760f95/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1104.806048] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-002c2c98-889a-4067-9032-a6e34ccc495b {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.816489] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Created directory with path [datastore1] vmware_temp/98b667b1-9af8-4c6e-83c6-be0b32760f95/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1104.816489] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Fetch image to [datastore1] vmware_temp/98b667b1-9af8-4c6e-83c6-be0b32760f95/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1104.816615] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to [datastore1] vmware_temp/98b667b1-9af8-4c6e-83c6-be0b32760f95/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1104.817325] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39ba4533-4c6c-47a8-a4ce-fbacf349733d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.824272] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07a7c921-0555-461a-9158-4914082f1912 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.832425] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07206309-7b86-41c9-9c9e-e40ac757dee3 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.863467] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-719ef17a-ca2d-43b7-a905-276ec8e1610c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.869379] env[68443]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-129ebfe8-9a23-4afc-bc85-4e9283c00f43 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.881764] env[68443]: DEBUG oslo_vmware.api [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Task: {'id': task-3373958, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.074563} completed successfully. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.882008] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Deleted the datastore file {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1104.882216] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Deleted contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1104.882395] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1104.882584] env[68443]: INFO nova.compute.manager [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Took 0.60 seconds to destroy the instance on the hypervisor. 
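
The CopyVirtualDisk_Task, DeleteDatastoreFile_Task and SearchDatastore_Task entries above all follow the same shape: submit the vCenter task, then poll it ("progress is 0%") until it reaches a terminal state ("completed successfully", or a fault that gets translated, as in the "Fault InvalidArgument not matched" entry). Below is a minimal, self-contained Python sketch of that polling loop; wait_for_task, poll_task_info and TaskFailed are illustrative stand-ins, not the actual oslo.vmware names or signatures.

    import time

    class TaskFailed(Exception):
        pass

    def wait_for_task(poll_task_info, interval=0.5, timeout=300):
        # Poll until the task reaches a terminal state, mirroring the
        # "progress is 0%" ... "completed successfully" lines above.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = poll_task_info()        # roughly one vCenter property read per poll
            if info["state"] == "success":
                return info.get("result")
            if info["state"] == "error":
                # terminal failure: this is where the vCenter fault would be
                # translated into a driver-level exception
                raise TaskFailed(info.get("error", "unknown fault"))
            time.sleep(interval)           # task still queued or running
        raise TaskFailed("timed out waiting for task")

    # toy run: the task succeeds on the third poll
    states = iter([{"state": "running"}, {"state": "running"},
                   {"state": "success", "result": "task-3373958"}])
    print(wait_for_task(lambda: next(states), interval=0))
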
[ 1104.884751] env[68443]: DEBUG nova.compute.claims [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Aborting claim: {{(pid=68443) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1104.884946] env[68443]: DEBUG oslo_concurrency.lockutils [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1104.885181] env[68443]: DEBUG oslo_concurrency.lockutils [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1104.901960] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1104.961023] env[68443]: DEBUG oslo_vmware.rw_handles [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/98b667b1-9af8-4c6e-83c6-be0b32760f95/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68443) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1105.023440] env[68443]: DEBUG oslo_vmware.rw_handles [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Completed reading data from the image iterator. {{(pid=68443) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1105.023683] env[68443]: DEBUG oslo_vmware.rw_handles [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/98b667b1-9af8-4c6e-83c6-be0b32760f95/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68443) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1105.296128] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18921cfd-7329-4538-beeb-69ae5abb23eb {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.304007] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a81afcd2-7572-457a-956b-31dce1a7a4d7 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.338114] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc76e392-8852-4fc8-b7af-0709d2cdfe4c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.346094] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18fbf603-f6ca-454c-97de-5e3d73ef9ae6 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.361199] env[68443]: DEBUG nova.compute.provider_tree [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1105.370425] env[68443]: DEBUG nova.scheduler.client.report [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1105.383859] env[68443]: DEBUG oslo_concurrency.lockutils [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.499s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1105.384406] env[68443]: ERROR nova.compute.manager [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1105.384406] env[68443]: Faults: ['InvalidArgument'] [ 1105.384406] env[68443]: ERROR nova.compute.manager [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Traceback (most recent call last): [ 1105.384406] env[68443]: ERROR nova.compute.manager [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1105.384406] 
env[68443]: ERROR nova.compute.manager [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] self.driver.spawn(context, instance, image_meta, [ 1105.384406] env[68443]: ERROR nova.compute.manager [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1105.384406] env[68443]: ERROR nova.compute.manager [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1105.384406] env[68443]: ERROR nova.compute.manager [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1105.384406] env[68443]: ERROR nova.compute.manager [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] self._fetch_image_if_missing(context, vi) [ 1105.384406] env[68443]: ERROR nova.compute.manager [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1105.384406] env[68443]: ERROR nova.compute.manager [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] image_cache(vi, tmp_image_ds_loc) [ 1105.384406] env[68443]: ERROR nova.compute.manager [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1105.384729] env[68443]: ERROR nova.compute.manager [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] vm_util.copy_virtual_disk( [ 1105.384729] env[68443]: ERROR nova.compute.manager [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1105.384729] env[68443]: ERROR nova.compute.manager [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] session._wait_for_task(vmdk_copy_task) [ 1105.384729] env[68443]: ERROR nova.compute.manager [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1105.384729] env[68443]: ERROR nova.compute.manager [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] return self.wait_for_task(task_ref) [ 1105.384729] env[68443]: ERROR nova.compute.manager [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1105.384729] env[68443]: ERROR nova.compute.manager [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] return evt.wait() [ 1105.384729] env[68443]: ERROR nova.compute.manager [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1105.384729] env[68443]: ERROR nova.compute.manager [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] result = hub.switch() [ 1105.384729] env[68443]: ERROR nova.compute.manager [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1105.384729] env[68443]: ERROR nova.compute.manager [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] return self.greenlet.switch() [ 1105.384729] env[68443]: ERROR nova.compute.manager [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1105.384729] env[68443]: ERROR nova.compute.manager [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] self.f(*self.args, **self.kw) [ 1105.385050] env[68443]: ERROR nova.compute.manager [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1105.385050] env[68443]: ERROR nova.compute.manager [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] raise exceptions.translate_fault(task_info.error) [ 1105.385050] env[68443]: ERROR nova.compute.manager [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1105.385050] env[68443]: ERROR nova.compute.manager [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Faults: ['InvalidArgument'] [ 1105.385050] env[68443]: ERROR nova.compute.manager [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] [ 1105.385183] env[68443]: DEBUG nova.compute.utils [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] VimFaultException {{(pid=68443) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1105.386531] env[68443]: DEBUG nova.compute.manager [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Build of instance 280e1cc5-91db-4a03-bca4-b2d2e4ddd221 was re-scheduled: A specified parameter was not correct: fileType [ 1105.386531] env[68443]: Faults: ['InvalidArgument'] {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1105.386914] env[68443]: DEBUG nova.compute.manager [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Unplugging VIFs for instance {{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1105.387113] env[68443]: DEBUG nova.compute.manager [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1105.387289] env[68443]: DEBUG nova.compute.manager [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1105.387453] env[68443]: DEBUG nova.network.neutron [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1105.714705] env[68443]: DEBUG nova.network.neutron [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1105.724187] env[68443]: INFO nova.compute.manager [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Took 0.34 seconds to deallocate network for instance. [ 1105.818848] env[68443]: INFO nova.scheduler.client.report [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Deleted allocations for instance 280e1cc5-91db-4a03-bca4-b2d2e4ddd221 [ 1105.839473] env[68443]: DEBUG oslo_concurrency.lockutils [None req-52f8da29-281a-4a79-856e-531542bb611b tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Lock "280e1cc5-91db-4a03-bca4-b2d2e4ddd221" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 379.377s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1105.840718] env[68443]: DEBUG oslo_concurrency.lockutils [None req-3ab74210-3b2e-4188-a510-0ae9a46c0da5 tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Lock "280e1cc5-91db-4a03-bca4-b2d2e4ddd221" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 181.344s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1105.841823] env[68443]: DEBUG oslo_concurrency.lockutils [None req-3ab74210-3b2e-4188-a510-0ae9a46c0da5 tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Acquiring lock "280e1cc5-91db-4a03-bca4-b2d2e4ddd221-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1105.841823] env[68443]: DEBUG oslo_concurrency.lockutils [None req-3ab74210-3b2e-4188-a510-0ae9a46c0da5 tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Lock "280e1cc5-91db-4a03-bca4-b2d2e4ddd221-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1105.841823] env[68443]: DEBUG oslo_concurrency.lockutils [None req-3ab74210-3b2e-4188-a510-0ae9a46c0da5 tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Lock "280e1cc5-91db-4a03-bca4-b2d2e4ddd221-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1105.844031] env[68443]: INFO nova.compute.manager [None req-3ab74210-3b2e-4188-a510-0ae9a46c0da5 tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Terminating instance [ 1105.845369] env[68443]: DEBUG nova.compute.manager [None req-3ab74210-3b2e-4188-a510-0ae9a46c0da5 tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1105.845567] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-3ab74210-3b2e-4188-a510-0ae9a46c0da5 tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1105.846055] env[68443]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-81751edd-e30c-491c-a6c0-bc06e3470583 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.855416] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80d8b65c-ad6b-47b2-9a5f-e4abbdf7c754 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.866861] env[68443]: DEBUG nova.compute.manager [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1105.888083] env[68443]: WARNING nova.virt.vmwareapi.vmops [None req-3ab74210-3b2e-4188-a510-0ae9a46c0da5 tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 280e1cc5-91db-4a03-bca4-b2d2e4ddd221 could not be found. 
[ 1105.888289] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-3ab74210-3b2e-4188-a510-0ae9a46c0da5 tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1105.888464] env[68443]: INFO nova.compute.manager [None req-3ab74210-3b2e-4188-a510-0ae9a46c0da5 tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1105.888829] env[68443]: DEBUG oslo.service.loopingcall [None req-3ab74210-3b2e-4188-a510-0ae9a46c0da5 tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1105.889079] env[68443]: DEBUG nova.compute.manager [-] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1105.889179] env[68443]: DEBUG nova.network.neutron [-] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1105.916877] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1105.917143] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1105.918673] env[68443]: INFO nova.compute.claims [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1105.930853] env[68443]: DEBUG nova.network.neutron [-] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1105.938247] env[68443]: INFO nova.compute.manager [-] [instance: 280e1cc5-91db-4a03-bca4-b2d2e4ddd221] Took 0.05 seconds to deallocate network for instance. 
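
The terminate path above logs "Instance does not exist on backend" because the rescheduled build already unregistered the VM and deleted its datastore files, yet the delete still completes ("Instance destroyed", "Took 0.04 seconds") and moves on to network deallocation. A rough sketch of that tolerate-missing-VM behaviour is below, under the assumption that the backend lookup raises when the VM is gone; destroy_on_backend, find_vm_ref, unregister_vm and delete_datastore_files are hypothetical names, not the vmops API.

    class InstanceNotFound(Exception):
        pass

    def destroy_on_backend(find_vm_ref, unregister_vm, delete_datastore_files, uuid):
        # Locate the VM on vCenter; a delete that races an earlier cleanup
        # (or a reschedule, as above) may find nothing to remove.
        try:
            vm_ref = find_vm_ref(uuid)
        except InstanceNotFound:
            # Nothing left on the backend: treat the delete as already done
            # and fall through to network deallocation, as the log does.
            print("instance %s already gone on backend" % uuid)
            return
        unregister_vm(vm_ref)              # e.g. the UnregisterVM step seen earlier
        delete_datastore_files(vm_ref)     # e.g. the DeleteDatastoreFile_Task step

    # toy run reproducing the WARNING path above
    def missing(uuid):
        raise InstanceNotFound(uuid)

    destroy_on_backend(missing,
                       unregister_vm=lambda ref: None,
                       delete_datastore_files=lambda ref: None,
                       uuid="280e1cc5-91db-4a03-bca4-b2d2e4ddd221")
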
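
The "Claim successful" and "Inventory has not changed" entries around here are easy to sanity-check by hand: Placement's usable capacity per resource class is (total - reserved) * allocation_ratio, applied to the inventory dict logged in this section. The arithmetic below only reuses numbers already present in this log.

    # numbers copied from the "Inventory has not changed ..." entries above
    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
    }

    def schedulable(inv):
        # capacity = (total - reserved) * allocation_ratio
        return {rc: (v["total"] - v["reserved"]) * v["allocation_ratio"]
                for rc, v in inv.items()}

    print(schedulable(inventory))
    # -> {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
    # Against the "Final resource view" earlier in this log (10 vCPUs, 1792 MB
    # and 10 GB already used), the 1 vCPU / 128 MB / 1 GB m1.nano request shown
    # further down fits easily, hence the immediate "Claim successful" entry.
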
[ 1106.033432] env[68443]: DEBUG oslo_concurrency.lockutils [None req-3ab74210-3b2e-4188-a510-0ae9a46c0da5 tempest-ImagesOneServerTestJSON-1896759534 tempest-ImagesOneServerTestJSON-1896759534-project-member] Lock "280e1cc5-91db-4a03-bca4-b2d2e4ddd221" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.192s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1106.277498] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-625c3e35-0054-404e-90ce-b6b5180e1d29 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.283638] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0041b9f0-8621-429a-9efc-723ed6748304 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.312672] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f322c5a4-1bed-4036-b5d3-71de7e075431 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.319825] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b9afdfc-fb96-4acd-a72a-b1ca14bf826f {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.332588] env[68443]: DEBUG nova.compute.provider_tree [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1106.341972] env[68443]: DEBUG nova.scheduler.client.report [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1106.359082] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.442s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1106.359577] env[68443]: DEBUG nova.compute.manager [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Start building networks asynchronously for instance. 
{{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1106.400482] env[68443]: DEBUG nova.compute.utils [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Using /dev/sd instead of None {{(pid=68443) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1106.401778] env[68443]: DEBUG nova.compute.manager [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Allocating IP information in the background. {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1106.401967] env[68443]: DEBUG nova.network.neutron [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] allocate_for_instance() {{(pid=68443) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1106.411035] env[68443]: DEBUG nova.compute.manager [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Start building block device mappings for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1106.463568] env[68443]: DEBUG nova.policy [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3c9056fd69304807abfeb2fedc4ae20f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5d0592ea4b3c49698b73391ae2be0ad8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68443) authorize /opt/stack/nova/nova/policy.py:203}} [ 1106.476919] env[68443]: DEBUG nova.compute.manager [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Start spawning the instance on the hypervisor. 
{{(pid=68443) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1106.502895] env[68443]: DEBUG nova.virt.hardware [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-08T18:48:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-08T18:48:12Z,direct_url=,disk_format='vmdk',id=a80a8b97-4d56-4702-9b02-9d115fcd6710,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f4f2a1e220914ec3b281775c224df247',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-08T18:48:13Z,virtual_size=,visibility=), allow threads: False {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1106.503879] env[68443]: DEBUG nova.virt.hardware [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Flavor limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1106.504119] env[68443]: DEBUG nova.virt.hardware [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Image limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1106.504321] env[68443]: DEBUG nova.virt.hardware [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Flavor pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1106.504477] env[68443]: DEBUG nova.virt.hardware [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Image pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1106.504630] env[68443]: DEBUG nova.virt.hardware [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1106.504837] env[68443]: DEBUG nova.virt.hardware [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1106.505008] env[68443]: DEBUG nova.virt.hardware [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1106.505183] 
env[68443]: DEBUG nova.virt.hardware [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Got 1 possible topologies {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1106.505346] env[68443]: DEBUG nova.virt.hardware [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1106.505842] env[68443]: DEBUG nova.virt.hardware [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1106.506386] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0d400d2-2330-43e1-85f5-cc772f92acc0 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.515255] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1c114ea-cf00-49d4-be22-d7ed451ce2d3 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.769823] env[68443]: DEBUG nova.network.neutron [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Successfully created port: 493c48de-97ef-48ed-be53-2cc191858be3 {{(pid=68443) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1107.383659] env[68443]: DEBUG nova.network.neutron [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Successfully updated port: 493c48de-97ef-48ed-be53-2cc191858be3 {{(pid=68443) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1107.400546] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Acquiring lock "refresh_cache-6e162408-6d3d-42e0-8992-f5843e9e7855" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1107.400845] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Acquired lock "refresh_cache-6e162408-6d3d-42e0-8992-f5843e9e7855" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1107.401154] env[68443]: DEBUG nova.network.neutron [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Building network info cache for instance {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1107.441621] env[68443]: DEBUG 
nova.network.neutron [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Instance cache missing network info. {{(pid=68443) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1107.613053] env[68443]: DEBUG nova.network.neutron [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Updating instance_info_cache with network_info: [{"id": "493c48de-97ef-48ed-be53-2cc191858be3", "address": "fa:16:3e:78:54:bf", "network": {"id": "ef236b73-c299-4b78-ba68-0be63802a04b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1138155627-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d0592ea4b3c49698b73391ae2be0ad8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afae6acd-1873-4228-9d5a-1cd5d4efe3e4", "external-id": "nsx-vlan-transportzone-183", "segmentation_id": 183, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap493c48de-97", "ovs_interfaceid": "493c48de-97ef-48ed-be53-2cc191858be3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1107.626836] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Releasing lock "refresh_cache-6e162408-6d3d-42e0-8992-f5843e9e7855" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1107.627227] env[68443]: DEBUG nova.compute.manager [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Instance network_info: |[{"id": "493c48de-97ef-48ed-be53-2cc191858be3", "address": "fa:16:3e:78:54:bf", "network": {"id": "ef236b73-c299-4b78-ba68-0be63802a04b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1138155627-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d0592ea4b3c49698b73391ae2be0ad8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afae6acd-1873-4228-9d5a-1cd5d4efe3e4", "external-id": "nsx-vlan-transportzone-183", "segmentation_id": 183, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap493c48de-97", 
"ovs_interfaceid": "493c48de-97ef-48ed-be53-2cc191858be3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1107.627755] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:78:54:bf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'afae6acd-1873-4228-9d5a-1cd5d4efe3e4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '493c48de-97ef-48ed-be53-2cc191858be3', 'vif_model': 'vmxnet3'}] {{(pid=68443) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1107.635866] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Creating folder: Project (5d0592ea4b3c49698b73391ae2be0ad8). Parent ref: group-v673136. {{(pid=68443) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1107.636467] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3ff5fc8e-e5b9-4c97-8881-36aa0a8a3053 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.649219] env[68443]: INFO nova.virt.vmwareapi.vm_util [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Created folder: Project (5d0592ea4b3c49698b73391ae2be0ad8) in parent group-v673136. [ 1107.649219] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Creating folder: Instances. Parent ref: group-v673190. {{(pid=68443) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1107.649219] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6906d8a0-1fb0-4a63-a55b-ccd9fe7fffb0 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.658330] env[68443]: INFO nova.virt.vmwareapi.vm_util [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Created folder: Instances in parent group-v673190. [ 1107.658567] env[68443]: DEBUG oslo.service.loopingcall [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1107.658754] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Creating VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1107.658955] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bca314a7-3717-491f-aa51-0e4b80740f23 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.677823] env[68443]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1107.677823] env[68443]: value = "task-3373961" [ 1107.677823] env[68443]: _type = "Task" [ 1107.677823] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.686385] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3373961, 'name': CreateVM_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.187816] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3373961, 'name': CreateVM_Task, 'duration_secs': 0.357426} completed successfully. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.187993] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Created VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1108.188686] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1108.188851] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1108.189168] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1108.189409] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a865214e-8ede-4de0-8bfd-d90c79dafd82 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.193990] env[68443]: DEBUG oslo_vmware.api [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Waiting for the task: (returnval){ [ 1108.193990] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]52e3a26d-62e3-2d04-d8d2-c621cd7f9059" [ 1108.193990] env[68443]: _type = "Task" [ 1108.193990] env[68443]: } to complete. 
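The entries above show the driver submitting vCenter tasks (CreateVM_Task, then SearchDatastore_Task) and polling them until the API reports completion: progress goes from 0% to "completed successfully" with a recorded duration_secs. Below is a minimal, standard-library-only sketch of that polling pattern; it is not the oslo.vmware implementation, and the poll_fn callback, the dict it returns, and the TaskFailed name are assumptions made purely for illustration.

import time

class TaskFailed(Exception):
    """Hypothetical error raised when the polled task reports an error."""

def wait_for_task(poll_fn, interval=0.5, timeout=300.0):
    """Poll `poll_fn` until the task completes, mirroring the log's
    'progress is 0%' ... 'completed successfully' sequence.

    `poll_fn` is assumed to return a dict such as
    {'state': 'running', 'progress': 40}, {'state': 'success'} or
    {'state': 'error', 'error': '...'}.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = poll_fn()
        if info['state'] == 'success':
            return info
        if info['state'] == 'error':
            raise TaskFailed(info.get('error', 'unknown task error'))
        # Still running: report progress and poll again after a short sleep,
        # like the repeated _poll_task DEBUG lines above.
        print("task progress is %s%%" % info.get('progress', 0))
        time.sleep(interval)
    raise TaskFailed('timed out waiting for task')

In the excerpt the real poll is oslo.vmware asking vCenter for the task's state about every half second (the two _poll_task timestamps for task-3373961 are roughly 0.5 s apart), and the reported 'duration_secs': 0.357426 is the wall-clock time the task spent before completing.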
{{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.201594] env[68443]: DEBUG oslo_vmware.api [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]52e3a26d-62e3-2d04-d8d2-c621cd7f9059, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.470214] env[68443]: DEBUG nova.compute.manager [req-bad28b11-2ffa-4999-863f-5ab5d39b43be req-50b9c6bd-6d8a-4b52-ae7c-a2364849daaa service nova] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Received event network-vif-plugged-493c48de-97ef-48ed-be53-2cc191858be3 {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1108.470455] env[68443]: DEBUG oslo_concurrency.lockutils [req-bad28b11-2ffa-4999-863f-5ab5d39b43be req-50b9c6bd-6d8a-4b52-ae7c-a2364849daaa service nova] Acquiring lock "6e162408-6d3d-42e0-8992-f5843e9e7855-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1108.470645] env[68443]: DEBUG oslo_concurrency.lockutils [req-bad28b11-2ffa-4999-863f-5ab5d39b43be req-50b9c6bd-6d8a-4b52-ae7c-a2364849daaa service nova] Lock "6e162408-6d3d-42e0-8992-f5843e9e7855-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1108.470828] env[68443]: DEBUG oslo_concurrency.lockutils [req-bad28b11-2ffa-4999-863f-5ab5d39b43be req-50b9c6bd-6d8a-4b52-ae7c-a2364849daaa service nova] Lock "6e162408-6d3d-42e0-8992-f5843e9e7855-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1108.470994] env[68443]: DEBUG nova.compute.manager [req-bad28b11-2ffa-4999-863f-5ab5d39b43be req-50b9c6bd-6d8a-4b52-ae7c-a2364849daaa service nova] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] No waiting events found dispatching network-vif-plugged-493c48de-97ef-48ed-be53-2cc191858be3 {{(pid=68443) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1108.471170] env[68443]: WARNING nova.compute.manager [req-bad28b11-2ffa-4999-863f-5ab5d39b43be req-50b9c6bd-6d8a-4b52-ae7c-a2364849daaa service nova] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Received unexpected event network-vif-plugged-493c48de-97ef-48ed-be53-2cc191858be3 for instance with vm_state building and task_state spawning. [ 1108.471323] env[68443]: DEBUG nova.compute.manager [req-bad28b11-2ffa-4999-863f-5ab5d39b43be req-50b9c6bd-6d8a-4b52-ae7c-a2364849daaa service nova] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Received event network-changed-493c48de-97ef-48ed-be53-2cc191858be3 {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1108.471633] env[68443]: DEBUG nova.compute.manager [req-bad28b11-2ffa-4999-863f-5ab5d39b43be req-50b9c6bd-6d8a-4b52-ae7c-a2364849daaa service nova] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Refreshing instance network info cache due to event network-changed-493c48de-97ef-48ed-be53-2cc191858be3. 
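The block above shows the compute service receiving network-vif-plugged and network-changed events for port 493c48de-97ef-48ed-be53-2cc191858be3, taking a per-instance "-events" lock, finding no waiter, and logging the "Received unexpected event ... for instance with vm_state building" warning. The sketch below only illustrates that pop-or-warn dispatch shape; the table, function names, and use of threading.Event are assumptions for illustration, not Nova's actual structures.

import threading
import warnings

# Illustrative only: a table of events that a build thread is waiting on,
# keyed by (instance_uuid, event_name), guarded by a lock.
_pending_events = {}
_events_lock = threading.Lock()

def pop_instance_event(instance_uuid, event_name):
    """Return the waiter for this event, or None if nothing is waiting,
    mirroring the 'No waiting events found dispatching ...' DEBUG line."""
    with _events_lock:
        return _pending_events.pop((instance_uuid, event_name), None)

def external_instance_event(instance_uuid, event_name):
    waiter = pop_instance_event(instance_uuid, event_name)
    if waiter is None:
        # Corresponds to the WARNING above: the event arrived before anyone
        # registered interest in it.
        warnings.warn("unexpected event %s for instance %s"
                      % (event_name, instance_uuid))
        return
    waiter.set()  # wake the thread blocked on this event

In this simplified picture a spawning thread would park a threading.Event under (instance_uuid, event_name) before plugging the VIF and wait() on it; here the Neutron event arrived while instance 6e162408-6d3d-42e0-8992-f5843e9e7855 was still building and spawning, so nothing was waiting yet.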
{{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1108.471633] env[68443]: DEBUG oslo_concurrency.lockutils [req-bad28b11-2ffa-4999-863f-5ab5d39b43be req-50b9c6bd-6d8a-4b52-ae7c-a2364849daaa service nova] Acquiring lock "refresh_cache-6e162408-6d3d-42e0-8992-f5843e9e7855" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1108.471771] env[68443]: DEBUG oslo_concurrency.lockutils [req-bad28b11-2ffa-4999-863f-5ab5d39b43be req-50b9c6bd-6d8a-4b52-ae7c-a2364849daaa service nova] Acquired lock "refresh_cache-6e162408-6d3d-42e0-8992-f5843e9e7855" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1108.471922] env[68443]: DEBUG nova.network.neutron [req-bad28b11-2ffa-4999-863f-5ab5d39b43be req-50b9c6bd-6d8a-4b52-ae7c-a2364849daaa service nova] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Refreshing network info cache for port 493c48de-97ef-48ed-be53-2cc191858be3 {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1108.712207] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1108.712529] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Processing image a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1108.716230] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1109.119022] env[68443]: DEBUG nova.network.neutron [req-bad28b11-2ffa-4999-863f-5ab5d39b43be req-50b9c6bd-6d8a-4b52-ae7c-a2364849daaa service nova] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Updated VIF entry in instance network info cache for port 493c48de-97ef-48ed-be53-2cc191858be3. 
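The instance_info_cache updates in this excerpt (one a few entries above, and the refreshed copy that follows below) carry the whole network_info payload as JSON. As a reading aid only, the snippet below parses a copy trimmed to fields visible in the log and pulls out the port ID, MAC address, fixed IP, MTU and tap device name; it is not Nova code, and the trimming drops fields that are present in the full payload.

import json

# Trimmed copy of the network_info payload logged in this excerpt.
network_info_json = '''
[{"id": "493c48de-97ef-48ed-be53-2cc191858be3",
  "address": "fa:16:3e:78:54:bf",
  "network": {"id": "ef236b73-c299-4b78-ba68-0be63802a04b",
              "bridge": "br-int",
              "subnets": [{"cidr": "192.168.128.0/28",
                           "ips": [{"address": "192.168.128.7",
                                    "type": "fixed", "version": 4}]}],
              "meta": {"mtu": 8950}},
  "type": "ovs",
  "devname": "tap493c48de-97",
  "ovs_interfaceid": "493c48de-97ef-48ed-be53-2cc191858be3",
  "active": true,
  "vnic_type": "normal"}]
'''

for vif in json.loads(network_info_json):
    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"]]
    print(vif["id"], vif["address"], fixed_ips,
          "mtu=%s" % vif["network"]["meta"]["mtu"],
          "dev=%s" % vif["devname"])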
{{(pid=68443) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1109.119375] env[68443]: DEBUG nova.network.neutron [req-bad28b11-2ffa-4999-863f-5ab5d39b43be req-50b9c6bd-6d8a-4b52-ae7c-a2364849daaa service nova] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Updating instance_info_cache with network_info: [{"id": "493c48de-97ef-48ed-be53-2cc191858be3", "address": "fa:16:3e:78:54:bf", "network": {"id": "ef236b73-c299-4b78-ba68-0be63802a04b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1138155627-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d0592ea4b3c49698b73391ae2be0ad8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afae6acd-1873-4228-9d5a-1cd5d4efe3e4", "external-id": "nsx-vlan-transportzone-183", "segmentation_id": 183, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap493c48de-97", "ovs_interfaceid": "493c48de-97ef-48ed-be53-2cc191858be3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1109.131344] env[68443]: DEBUG oslo_concurrency.lockutils [req-bad28b11-2ffa-4999-863f-5ab5d39b43be req-50b9c6bd-6d8a-4b52-ae7c-a2364849daaa service nova] Releasing lock "refresh_cache-6e162408-6d3d-42e0-8992-f5843e9e7855" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1109.326063] env[68443]: DEBUG oslo_concurrency.lockutils [None req-f2a59e34-0895-4732-a279-a53049d00259 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Acquiring lock "3842d98e-d971-456c-b287-53c513285acf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1131.230235] env[68443]: DEBUG oslo_concurrency.lockutils [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Acquiring lock "75e3d9b0-4317-4e6e-9f2b-d32134f7223f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1131.232292] env[68443]: DEBUG oslo_concurrency.lockutils [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Lock "75e3d9b0-4317-4e6e-9f2b-d32134f7223f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1132.667793] env[68443]: DEBUG oslo_concurrency.lockutils [None req-b187f8f5-a692-4f62-a0a2-c99437b9ac54 tempest-ListServerFiltersTestJSON-999443258 tempest-ListServerFiltersTestJSON-999443258-project-member] Acquiring lock 
"1473cac8-4f3b-4c4a-ae12-a7e63e37233d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1132.668102] env[68443]: DEBUG oslo_concurrency.lockutils [None req-b187f8f5-a692-4f62-a0a2-c99437b9ac54 tempest-ListServerFiltersTestJSON-999443258 tempest-ListServerFiltersTestJSON-999443258-project-member] Lock "1473cac8-4f3b-4c4a-ae12-a7e63e37233d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1133.662488] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a66cb5e3-236f-4bc5-b934-f8b827c980d1 tempest-ListServerFiltersTestJSON-999443258 tempest-ListServerFiltersTestJSON-999443258-project-member] Acquiring lock "44678947-527b-40ea-9919-b1491b6f1be3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1133.662698] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a66cb5e3-236f-4bc5-b934-f8b827c980d1 tempest-ListServerFiltersTestJSON-999443258 tempest-ListServerFiltersTestJSON-999443258-project-member] Lock "44678947-527b-40ea-9919-b1491b6f1be3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1134.769187] env[68443]: DEBUG oslo_concurrency.lockutils [None req-159d19b8-0058-4483-be08-dc639f349fae tempest-ListServerFiltersTestJSON-999443258 tempest-ListServerFiltersTestJSON-999443258-project-member] Acquiring lock "578460b8-6965-4169-ba8c-a04a189af5be" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1134.769497] env[68443]: DEBUG oslo_concurrency.lockutils [None req-159d19b8-0058-4483-be08-dc639f349fae tempest-ListServerFiltersTestJSON-999443258 tempest-ListServerFiltersTestJSON-999443258-project-member] Lock "578460b8-6965-4169-ba8c-a04a189af5be" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1138.824727] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1140.394347] env[68443]: DEBUG oslo_concurrency.lockutils [None req-02d98ff1-2944-41d3-b673-a6dcc1a2e920 tempest-TenantUsagesTestJSON-810919708 tempest-TenantUsagesTestJSON-810919708-project-member] Acquiring lock "17f0507a-8889-46e1-bce3-d2d423dc9a7f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1140.395105] env[68443]: DEBUG oslo_concurrency.lockutils [None req-02d98ff1-2944-41d3-b673-a6dcc1a2e920 tempest-TenantUsagesTestJSON-810919708 
tempest-TenantUsagesTestJSON-810919708-project-member] Lock "17f0507a-8889-46e1-bce3-d2d423dc9a7f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1140.825096] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1142.369337] env[68443]: DEBUG oslo_concurrency.lockutils [None req-04b14849-4eb2-419a-857d-63edef1eb131 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Acquiring lock "6e162408-6d3d-42e0-8992-f5843e9e7855" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1142.824835] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1142.825052] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Starting heal instance info cache {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1142.825180] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Rebuilding the list of instances to heal {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1142.852464] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1142.852464] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1142.852639] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 63801b63-1601-4e77-a500-3569713177bd] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1142.852689] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 6df57929-1115-4080-8131-8960525eb833] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1142.852840] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Skipping network cache update for instance because it is Building. 
{{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1142.852974] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1142.853109] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1142.853230] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1142.853352] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 3842d98e-d971-456c-b287-53c513285acf] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1142.853470] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1142.853591] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Didn't find any instances for network info cache update. 
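The _heal_instance_info_cache entries just above rebuild the list of instances on the host, skip every one that is still Building, and finish with "Didn't find any instances for network info cache update." A simplified sketch of that filtering pass follows; the Instance dataclass and refresh_cache callback are stand-ins, and the early return mimics the fact that the periodic task heals at most one instance per run.

from dataclasses import dataclass

@dataclass
class Instance:
    uuid: str
    vm_state: str  # e.g. 'building' or 'active'

def heal_instance_info_cache(instances, refresh_cache):
    for inst in instances:
        if inst.vm_state == 'building':
            # Matches 'Skipping network cache update for instance because
            # it is Building.' in the log above.
            continue
        refresh_cache(inst)
        return inst
    # Matches "Didn't find any instances for network info cache update."
    return None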
{{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1142.854127] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1143.164925] env[68443]: DEBUG oslo_concurrency.lockutils [None req-df12c16d-7129-4220-8f41-156e62ce0f8e tempest-ServerRescueNegativeTestJSON-1396630101 tempest-ServerRescueNegativeTestJSON-1396630101-project-member] Acquiring lock "63e4023e-8a82-4179-8b7f-53801f9bb744" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1143.164925] env[68443]: DEBUG oslo_concurrency.lockutils [None req-df12c16d-7129-4220-8f41-156e62ce0f8e tempest-ServerRescueNegativeTestJSON-1396630101 tempest-ServerRescueNegativeTestJSON-1396630101-project-member] Lock "63e4023e-8a82-4179-8b7f-53801f9bb744" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1144.654691] env[68443]: DEBUG oslo_concurrency.lockutils [None req-06e33e97-4f25-4fc8-823f-045355eec793 tempest-ServerRescueNegativeTestJSON-1396630101 tempest-ServerRescueNegativeTestJSON-1396630101-project-member] Acquiring lock "89765b72-d495-4a2a-9b97-e8d7d1d80f49" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1144.654691] env[68443]: DEBUG oslo_concurrency.lockutils [None req-06e33e97-4f25-4fc8-823f-045355eec793 tempest-ServerRescueNegativeTestJSON-1396630101 tempest-ServerRescueNegativeTestJSON-1396630101-project-member] Lock "89765b72-d495-4a2a-9b97-e8d7d1d80f49" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1144.824948] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1144.858455] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1144.873087] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1144.873308] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68443) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1144.873470] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1144.873624] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68443) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1144.875258] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13783cb3-c23f-47e0-9ca4-2bdf4c3e04df {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.887182] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ef3e9fb-c1fd-43a0-9cc7-d677ceafd04c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.901705] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68af8ed9-c90a-4257-9511-3a115b0f8cfe {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.911799] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f0e4da1-c427-43b5-8bf7-cb5fdab082f0 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.943867] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181009MB free_disk=105GB free_vcpus=48 pci_devices=None {{(pid=68443) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1144.943867] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1144.943867] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1145.019024] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 8fdbd88f-f608-4f26-9076-7d2f6eb67224 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
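The lockutils entries around this point record, for each named lock, how long the caller waited to acquire it and how long it was held: the "compute_resources" lock just above is acquired after waiting 0.000s and, further down in the excerpt, released after being held 0.861s. Below is a minimal sketch of that instrumentation using threading.Lock and time.monotonic; it illustrates the logged pattern only and is not the oslo.concurrency implementation (which, among other things, also supports external file-based locks).

import contextlib
import threading
import time

_named_locks = {}
_registry_guard = threading.Lock()

@contextlib.contextmanager
def timed_lock(name, owner):
    """Acquire the named lock and report waited/held durations, echoing the
    'acquired ... waited Ns' / 'released ... held Ns' DEBUG lines above."""
    with _registry_guard:
        lock = _named_locks.setdefault(name, threading.Lock())
    t0 = time.monotonic()
    lock.acquire()
    t1 = time.monotonic()
    print('Lock "%s" acquired by "%s" :: waited %.3fs' % (name, owner, t1 - t0))
    try:
        yield
    finally:
        lock.release()
        print('Lock "%s" released by "%s" :: held %.3fs'
              % (name, owner, time.monotonic() - t1))

Used as `with timed_lock("compute_resources", "ResourceTracker._update_available_resource"): ...`, the sketch emits lines shaped like the ones in this excerpt.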
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1145.019188] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 08a980e1-ca8e-4af3-afbf-bd688e11259f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1145.019568] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 63801b63-1601-4e77-a500-3569713177bd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1145.019568] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 6df57929-1115-4080-8131-8960525eb833 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1145.019568] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 844f2b9d-ad2a-431a-a587-65ba446d571f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1145.019803] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance cee290e6-66e3-4d2e-a9bb-f93db33eaaaa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1145.019803] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 1c1acc0d-263d-4687-93ff-291d18a592d8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1145.019931] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1145.020060] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 3842d98e-d971-456c-b287-53c513285acf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1145.020177] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 6e162408-6d3d-42e0-8992-f5843e9e7855 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1145.034690] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 767f1f71-6b02-4b88-83bf-dcbf9d87b895 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1145.052563] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 95bd5db1-4fff-45a2-84a0-6dd35c8463a2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1145.069976] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance ff1a64cd-c34a-4907-a40d-ddfbb28dbc7a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1145.083211] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance fd0de9a2-7a54-46be-8b6a-3415366e110c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1145.096785] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 9ad6308e-05f6-4070-bbda-500e043265f0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1145.109024] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance e8c14375-4019-4d0d-9f98-6732a55faa89 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1145.120172] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 88ce1e8e-b0d3-4c98-8efe-c0258ef1b606 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1145.130274] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 0591711b-fa55-403d-b8cc-c055b8867214 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1145.141461] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance f3b037d7-2a19-498f-b60e-7ac172411054 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1145.152093] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 1f385709-d38c-42bc-b250-e4260f7c8af8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1145.161388] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance aa464980-860e-4c6f-a732-83d75503e4c9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1145.173568] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 3ec222f1-9af7-46f8-97ff-27a8f96bd4d8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1145.183882] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 7d79fd13-4514-4fef-b953-4d85af6af40d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1145.194009] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance c7488d31-9e3c-42d5-8bdc-51919522d556 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1145.203968] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance bcdc4f46-810d-4ed7-84f1-2db2c318f920 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1145.214276] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 44aa2211-e364-46c6-9cad-a53f5563808b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1145.223473] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 91fd9c10-db96-4366-9548-13b36f94db6b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1145.233186] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 75e3d9b0-4317-4e6e-9f2b-d32134f7223f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1145.242564] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 1473cac8-4f3b-4c4a-ae12-a7e63e37233d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1145.252387] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 44678947-527b-40ea-9919-b1491b6f1be3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1145.261396] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 578460b8-6965-4169-ba8c-a04a189af5be has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1145.270706] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 17f0507a-8889-46e1-bce3-d2d423dc9a7f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1145.280415] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 63e4023e-8a82-4179-8b7f-53801f9bb744 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1145.289487] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 89765b72-d495-4a2a-9b97-e8d7d1d80f49 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
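The per-instance allocations listed above (ten actively managed instances, each with DISK_GB=1, MEMORY_MB=128, VCPU=1) line up exactly with the final resource view reported a few entries below, provided the 512 MB "reserved" value from the MEMORY_MB inventory is counted as used. A quick arithmetic cross-check, with every figure taken from this excerpt:

# Cross-check of the resource tracker numbers reported just below in the log
# ("Total usable vcpus: 48, total allocated vcpus: 10" and the final resource
# view with used_ram=1792MB, used_disk=10GB, used_vcpus=10).
reserved_ram_mb = 512                      # MEMORY_MB 'reserved' in the inventory
instances = 10                             # actively managed instances listed above
per_instance = {"MEMORY_MB": 128, "DISK_GB": 1, "VCPU": 1}

used_ram_mb = reserved_ram_mb + instances * per_instance["MEMORY_MB"]
used_disk_gb = instances * per_instance["DISK_GB"]
used_vcpus = instances * per_instance["VCPU"]

assert used_ram_mb == 1792   # matches used_ram=1792MB
assert used_disk_gb == 10    # matches used_disk=10GB
assert used_vcpus == 10      # matches used_vcpus=10

# With allocation_ratio 4.0 on 48 physical vCPUs, placement can hand out up
# to 192 VCPU units in total (per-request cap is max_unit=16).
assert int(48 * 4.0) == 192

The scheduled-but-not-yet-started instances whose allocation healing is skipped above do not enter these host-side usage totals, which is consistent with the arithmetic working out on exactly the ten actively managed instances.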
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1145.289733] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1145.289880] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1145.721625] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b083aef-dfc4-4cfc-b83e-a385ac4c5d39 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.729228] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23040e76-a18d-4f4d-9971-28759874e4f0 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.759953] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8399fb9-c623-40c3-90df-597b4f4536ae {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.767075] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-427a1dfa-eb28-4a05-8e2c-135e8e98d228 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.780037] env[68443]: DEBUG nova.compute.provider_tree [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1145.789604] env[68443]: DEBUG nova.scheduler.client.report [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1145.803979] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68443) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1145.804212] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.861s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1146.771634] env[68443]: DEBUG oslo_service.periodic_task [None 
req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1146.820434] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1146.824225] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1146.824441] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1146.824588] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68443) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1152.833220] env[68443]: DEBUG oslo_concurrency.lockutils [None req-94e2fd78-7e45-4e02-8f38-a04ee4b283cc tempest-ImagesNegativeTestJSON-1935587335 tempest-ImagesNegativeTestJSON-1935587335-project-member] Acquiring lock "e232a1e8-c431-4b33-aa45-9de3a337f632" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1152.833518] env[68443]: DEBUG oslo_concurrency.lockutils [None req-94e2fd78-7e45-4e02-8f38-a04ee4b283cc tempest-ImagesNegativeTestJSON-1935587335 tempest-ImagesNegativeTestJSON-1935587335-project-member] Lock "e232a1e8-c431-4b33-aa45-9de3a337f632" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1153.188888] env[68443]: WARNING oslo_vmware.rw_handles [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1153.188888] env[68443]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1153.188888] env[68443]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1153.188888] env[68443]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1153.188888] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1153.188888] env[68443]: ERROR oslo_vmware.rw_handles response.begin() [ 1153.188888] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1153.188888] env[68443]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1153.188888] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1153.188888] 
env[68443]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1153.188888] env[68443]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1153.188888] env[68443]: ERROR oslo_vmware.rw_handles [ 1153.189645] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Downloaded image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to vmware_temp/98b667b1-9af8-4c6e-83c6-be0b32760f95/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1153.191271] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Caching image {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1153.191641] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Copying Virtual Disk [datastore1] vmware_temp/98b667b1-9af8-4c6e-83c6-be0b32760f95/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk to [datastore1] vmware_temp/98b667b1-9af8-4c6e-83c6-be0b32760f95/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk {{(pid=68443) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1153.191845] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7eeef4a2-de20-4a28-9d26-7bf11846bf81 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.199420] env[68443]: DEBUG oslo_vmware.api [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Waiting for the task: (returnval){ [ 1153.199420] env[68443]: value = "task-3373969" [ 1153.199420] env[68443]: _type = "Task" [ 1153.199420] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.210287] env[68443]: DEBUG oslo_vmware.api [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Task: {'id': task-3373969, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.709845] env[68443]: DEBUG oslo_vmware.exceptions [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Fault InvalidArgument not matched. 
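The "Fault InvalidArgument not matched." line just above means the fault name returned by vCenter did not map to a more specific exception class, so a generic VimFaultException carrying the fault list is raised instead; that is the error the spawn traceback below surfaces as "A specified parameter was not correct: fileType". The sketch below shows only the lookup-with-fallback shape of such fault translation; the class and registry names are invented for illustration and are not the oslo.vmware ones.

class VimFault(Exception):
    """Generic stand-in for a SOAP fault carrying a list of fault names."""
    def __init__(self, msg, fault_list=None):
        super().__init__(msg)
        self.fault_list = fault_list or []

class FileNotFoundFault(VimFault):
    """Example of a specifically-mapped fault class."""

# Registry of fault names that translate to specific exception types.
# "InvalidArgument" is deliberately absent so it falls through to the
# generic case, as the DEBUG line above reports.
_FAULT_CLASSES = {"FileNotFound": FileNotFoundFault}

def translate_fault(fault_name, msg):
    cls = _FAULT_CLASSES.get(fault_name)
    if cls is None:
        print("Fault %s not matched." % fault_name)  # mirrors the DEBUG line
        return VimFault(msg, fault_list=[fault_name])
    return cls(msg, fault_list=[fault_name])

exc = translate_fault("InvalidArgument",
                      "A specified parameter was not correct: fileType")
print(type(exc).__name__, exc, exc.fault_list)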
{{(pid=68443) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1153.710107] env[68443]: DEBUG oslo_concurrency.lockutils [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1153.710753] env[68443]: ERROR nova.compute.manager [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1153.710753] env[68443]: Faults: ['InvalidArgument'] [ 1153.710753] env[68443]: ERROR nova.compute.manager [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Traceback (most recent call last): [ 1153.710753] env[68443]: ERROR nova.compute.manager [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1153.710753] env[68443]: ERROR nova.compute.manager [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] yield resources [ 1153.710753] env[68443]: ERROR nova.compute.manager [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1153.710753] env[68443]: ERROR nova.compute.manager [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] self.driver.spawn(context, instance, image_meta, [ 1153.710753] env[68443]: ERROR nova.compute.manager [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1153.710753] env[68443]: ERROR nova.compute.manager [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1153.710753] env[68443]: ERROR nova.compute.manager [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1153.710753] env[68443]: ERROR nova.compute.manager [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] self._fetch_image_if_missing(context, vi) [ 1153.710753] env[68443]: ERROR nova.compute.manager [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1153.711132] env[68443]: ERROR nova.compute.manager [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] image_cache(vi, tmp_image_ds_loc) [ 1153.711132] env[68443]: ERROR nova.compute.manager [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1153.711132] env[68443]: ERROR nova.compute.manager [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] vm_util.copy_virtual_disk( [ 1153.711132] env[68443]: ERROR nova.compute.manager [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1153.711132] env[68443]: ERROR nova.compute.manager [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] session._wait_for_task(vmdk_copy_task) [ 1153.711132] env[68443]: ERROR nova.compute.manager [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1153.711132] env[68443]: ERROR nova.compute.manager [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] return self.wait_for_task(task_ref) [ 1153.711132] env[68443]: ERROR nova.compute.manager [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1153.711132] env[68443]: ERROR nova.compute.manager [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] return evt.wait() [ 1153.711132] env[68443]: ERROR nova.compute.manager [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1153.711132] env[68443]: ERROR nova.compute.manager [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] result = hub.switch() [ 1153.711132] env[68443]: ERROR nova.compute.manager [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1153.711132] env[68443]: ERROR nova.compute.manager [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] return self.greenlet.switch() [ 1153.711454] env[68443]: ERROR nova.compute.manager [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1153.711454] env[68443]: ERROR nova.compute.manager [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] self.f(*self.args, **self.kw) [ 1153.711454] env[68443]: ERROR nova.compute.manager [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1153.711454] env[68443]: ERROR nova.compute.manager [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] raise exceptions.translate_fault(task_info.error) [ 1153.711454] env[68443]: ERROR nova.compute.manager [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1153.711454] env[68443]: ERROR nova.compute.manager [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Faults: ['InvalidArgument'] [ 1153.711454] env[68443]: ERROR nova.compute.manager [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] [ 1153.711454] env[68443]: INFO nova.compute.manager [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Terminating instance [ 1153.716271] env[68443]: DEBUG oslo_concurrency.lockutils [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1153.716496] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1153.717157] env[68443]: DEBUG nova.compute.manager [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 
tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1153.717351] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1153.717586] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-53eae8e5-a1ac-4603-9458-13a664cbbc4a {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.720458] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-460fd8ef-9094-43dc-9037-cb1f32755897 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.730953] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Unregistering the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1153.731209] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ac4b2e56-8a23-42ad-b118-dd3ea3c67e92 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.734641] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1153.734641] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68443) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1153.737111] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a27ff4bb-e928-4cdb-9f92-91232564e1a8 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.742020] env[68443]: DEBUG oslo_vmware.api [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Waiting for the task: (returnval){ [ 1153.742020] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]528311b9-d1c5-a5a7-ed46-be40980c3a71" [ 1153.742020] env[68443]: _type = "Task" [ 1153.742020] env[68443]: } to complete. 
{{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.750215] env[68443]: DEBUG oslo_vmware.api [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]528311b9-d1c5-a5a7-ed46-be40980c3a71, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.800334] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Unregistered the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1153.800334] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Deleting contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1153.800334] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Deleting the datastore file [datastore1] 08a980e1-ca8e-4af3-afbf-bd688e11259f {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1153.800334] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-700e5aa9-2867-4a8a-b812-995be55cef7d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.808017] env[68443]: DEBUG oslo_vmware.api [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Waiting for the task: (returnval){ [ 1153.808017] env[68443]: value = "task-3373971" [ 1153.808017] env[68443]: _type = "Task" [ 1153.808017] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.815924] env[68443]: DEBUG oslo_vmware.api [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Task: {'id': task-3373971, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.250391] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Preparing fetch location {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1154.250763] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Creating directory with path [datastore1] vmware_temp/284c3d5a-a103-4f86-aac4-484c3d81d122/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1154.250884] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9c966e2f-5e92-491d-99f7-972285857f5e {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.262129] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Created directory with path [datastore1] vmware_temp/284c3d5a-a103-4f86-aac4-484c3d81d122/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1154.262334] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Fetch image to [datastore1] vmware_temp/284c3d5a-a103-4f86-aac4-484c3d81d122/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1154.262505] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to [datastore1] vmware_temp/284c3d5a-a103-4f86-aac4-484c3d81d122/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1154.263302] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2440d885-560d-4441-8e56-5fc576693940 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.270222] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d13f477a-4086-497b-98e1-8d831c2dbf24 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.285201] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f653123-31dc-4194-89b6-739c4d80a55b {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.325975] env[68443]: DEBUG oslo_vmware.service 
[-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03cf806e-febe-41b5-a097-06ab9a2fad0f {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.336109] env[68443]: DEBUG oslo_vmware.api [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Task: {'id': task-3373971, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.069753} completed successfully. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.336109] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Deleted the datastore file {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1154.336109] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Deleted contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1154.336109] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1154.336109] env[68443]: INFO nova.compute.manager [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Took 0.62 seconds to destroy the instance on the hypervisor. 
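[Editor's note] The traceback above ends in oslo_vmware's task polling: wait_for_task blocks on _poll_task, and when the CopyVirtualDisk_Task finishes in an error state the task's fault is translated into a VimFaultException ("A specified parameter was not correct: fileType", Faults: ['InvalidArgument']), which aborts the spawn and triggers the destroy path that follows. The sketch below is illustrative only and is not oslo.vmware's implementation; the names poll_task, VCenterTask and VimFault are invented for this example.

# Illustrative sketch of the poll-until-done / translate-fault pattern seen in the
# traceback above. Hypothetical names; not oslo.vmware code.
import time


class VimFault(Exception):
    """Carries the vCenter fault list, mirroring the "Faults: [...]" log lines."""

    def __init__(self, message, fault_list):
        super().__init__(message)
        self.fault_list = fault_list


class VCenterTask:
    """Stand-in for a vSphere task object such as CopyVirtualDisk_Task."""

    def __init__(self, states):
        self._states = iter(states)

    def info(self):
        # Each call returns (state, progress, error) as the task advances.
        return next(self._states)


def poll_task(task, interval=0.5):
    """Poll a task until it succeeds, or raise a translated fault on error.

    Mirrors the flow in the traceback: wait_for_task -> _poll_task ->
    raise translated fault from task_info.error.
    """
    while True:
        state, progress, error = task.info()
        print(f"Task progress is {progress}%.")
        if state == "success":
            return
        if state == "error":
            # e.g. message='A specified parameter was not correct: fileType',
            # faults=['InvalidArgument'], as logged above.
            raise VimFault(error["message"], error["faults"])
        time.sleep(interval)


if __name__ == "__main__":
    task = VCenterTask([
        ("running", 0, None),
        ("error", 0, {"message": "A specified parameter was not correct: fileType",
                      "faults": ["InvalidArgument"]}),
    ])
    try:
        poll_task(task, interval=0.0)
    except VimFault as exc:
        print(f"Instance failed to spawn: {exc} Faults: {exc.fault_list}")

Running the sketch prints a progress line and then the failure, which is the same shape as the "progress is 0%" and "Instance failed to spawn" records in the log.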
[ 1154.337550] env[68443]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-78234aff-678a-4652-bc9b-3a719d7b20eb {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.341284] env[68443]: DEBUG nova.compute.claims [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Aborting claim: {{(pid=68443) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1154.341623] env[68443]: DEBUG oslo_concurrency.lockutils [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1154.342012] env[68443]: DEBUG oslo_concurrency.lockutils [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1154.360921] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1154.421586] env[68443]: DEBUG oslo_vmware.rw_handles [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/284c3d5a-a103-4f86-aac4-484c3d81d122/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68443) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1154.492459] env[68443]: DEBUG oslo_vmware.rw_handles [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Completed reading data from the image iterator. {{(pid=68443) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1154.492459] env[68443]: DEBUG oslo_vmware.rw_handles [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/284c3d5a-a103-4f86-aac4-484c3d81d122/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68443) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1154.941951] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-337ab5f5-f5bc-4ea6-b94c-5e6e6fc95c73 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.949816] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a369062c-f35f-4257-8d00-b94b77208aa3 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.979433] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-558da611-4500-47fb-a2c3-c27825f9b8fd {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.986962] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93aad499-c94b-43a4-bda1-537f651dbbe6 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.001565] env[68443]: DEBUG nova.compute.provider_tree [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1155.013057] env[68443]: DEBUG nova.scheduler.client.report [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1155.033994] env[68443]: DEBUG oslo_concurrency.lockutils [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.692s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1155.034562] env[68443]: ERROR nova.compute.manager [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1155.034562] env[68443]: Faults: ['InvalidArgument'] [ 1155.034562] env[68443]: ERROR nova.compute.manager [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Traceback (most recent call last): [ 1155.034562] env[68443]: ERROR nova.compute.manager [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in 
_build_and_run_instance [ 1155.034562] env[68443]: ERROR nova.compute.manager [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] self.driver.spawn(context, instance, image_meta, [ 1155.034562] env[68443]: ERROR nova.compute.manager [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1155.034562] env[68443]: ERROR nova.compute.manager [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1155.034562] env[68443]: ERROR nova.compute.manager [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1155.034562] env[68443]: ERROR nova.compute.manager [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] self._fetch_image_if_missing(context, vi) [ 1155.034562] env[68443]: ERROR nova.compute.manager [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1155.034562] env[68443]: ERROR nova.compute.manager [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] image_cache(vi, tmp_image_ds_loc) [ 1155.034562] env[68443]: ERROR nova.compute.manager [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1155.034948] env[68443]: ERROR nova.compute.manager [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] vm_util.copy_virtual_disk( [ 1155.034948] env[68443]: ERROR nova.compute.manager [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1155.034948] env[68443]: ERROR nova.compute.manager [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] session._wait_for_task(vmdk_copy_task) [ 1155.034948] env[68443]: ERROR nova.compute.manager [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1155.034948] env[68443]: ERROR nova.compute.manager [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] return self.wait_for_task(task_ref) [ 1155.034948] env[68443]: ERROR nova.compute.manager [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1155.034948] env[68443]: ERROR nova.compute.manager [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] return evt.wait() [ 1155.034948] env[68443]: ERROR nova.compute.manager [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1155.034948] env[68443]: ERROR nova.compute.manager [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] result = hub.switch() [ 1155.034948] env[68443]: ERROR nova.compute.manager [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1155.034948] env[68443]: ERROR nova.compute.manager [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] return self.greenlet.switch() [ 1155.034948] env[68443]: ERROR nova.compute.manager [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1155.034948] env[68443]: ERROR nova.compute.manager [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] self.f(*self.args, **self.kw) [ 1155.035365] env[68443]: ERROR nova.compute.manager [instance: 
08a980e1-ca8e-4af3-afbf-bd688e11259f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1155.035365] env[68443]: ERROR nova.compute.manager [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] raise exceptions.translate_fault(task_info.error) [ 1155.035365] env[68443]: ERROR nova.compute.manager [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1155.035365] env[68443]: ERROR nova.compute.manager [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Faults: ['InvalidArgument'] [ 1155.035365] env[68443]: ERROR nova.compute.manager [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] [ 1155.035365] env[68443]: DEBUG nova.compute.utils [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] VimFaultException {{(pid=68443) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1155.037652] env[68443]: DEBUG nova.compute.manager [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Build of instance 08a980e1-ca8e-4af3-afbf-bd688e11259f was re-scheduled: A specified parameter was not correct: fileType [ 1155.037652] env[68443]: Faults: ['InvalidArgument'] {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1155.038041] env[68443]: DEBUG nova.compute.manager [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Unplugging VIFs for instance {{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1155.038220] env[68443]: DEBUG nova.compute.manager [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1155.038394] env[68443]: DEBUG nova.compute.manager [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1155.038591] env[68443]: DEBUG nova.network.neutron [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1155.370400] env[68443]: DEBUG nova.network.neutron [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1155.387051] env[68443]: INFO nova.compute.manager [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Took 0.35 seconds to deallocate network for instance. [ 1155.528043] env[68443]: INFO nova.scheduler.client.report [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Deleted allocations for instance 08a980e1-ca8e-4af3-afbf-bd688e11259f [ 1155.558244] env[68443]: DEBUG oslo_concurrency.lockutils [None req-f793210a-02b9-4e58-aa3e-6eb46144b4f2 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Lock "08a980e1-ca8e-4af3-afbf-bd688e11259f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 422.260s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1155.559509] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a560b179-4e1b-4505-b452-d8dac37f09f9 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Lock "08a980e1-ca8e-4af3-afbf-bd688e11259f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 221.544s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1155.559764] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a560b179-4e1b-4505-b452-d8dac37f09f9 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Acquiring lock "08a980e1-ca8e-4af3-afbf-bd688e11259f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1155.559926] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a560b179-4e1b-4505-b452-d8dac37f09f9 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Lock "08a980e1-ca8e-4af3-afbf-bd688e11259f-events" 
acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1155.561888] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a560b179-4e1b-4505-b452-d8dac37f09f9 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Lock "08a980e1-ca8e-4af3-afbf-bd688e11259f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1155.563095] env[68443]: INFO nova.compute.manager [None req-a560b179-4e1b-4505-b452-d8dac37f09f9 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Terminating instance [ 1155.568920] env[68443]: DEBUG nova.compute.manager [None req-a560b179-4e1b-4505-b452-d8dac37f09f9 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1155.569350] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-a560b179-4e1b-4505-b452-d8dac37f09f9 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1155.569442] env[68443]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7473749f-af9b-4f43-9fb6-3b6dd8a13f16 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.578276] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf311484-de4a-4c4f-99fe-81ef39efc907 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.590552] env[68443]: DEBUG nova.compute.manager [None req-8f361853-4d2d-4481-bec4-8155163e22e0 tempest-MultipleCreateTestJSON-1060821449 tempest-MultipleCreateTestJSON-1060821449-project-member] [instance: 767f1f71-6b02-4b88-83bf-dcbf9d87b895] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1155.617277] env[68443]: WARNING nova.virt.vmwareapi.vmops [None req-a560b179-4e1b-4505-b452-d8dac37f09f9 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 08a980e1-ca8e-4af3-afbf-bd688e11259f could not be found. 
[ 1155.617277] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-a560b179-4e1b-4505-b452-d8dac37f09f9 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1155.617277] env[68443]: INFO nova.compute.manager [None req-a560b179-4e1b-4505-b452-d8dac37f09f9 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1155.617277] env[68443]: DEBUG oslo.service.loopingcall [None req-a560b179-4e1b-4505-b452-d8dac37f09f9 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1155.617462] env[68443]: DEBUG nova.compute.manager [-] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1155.617462] env[68443]: DEBUG nova.network.neutron [-] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1155.634774] env[68443]: DEBUG nova.compute.manager [None req-8f361853-4d2d-4481-bec4-8155163e22e0 tempest-MultipleCreateTestJSON-1060821449 tempest-MultipleCreateTestJSON-1060821449-project-member] [instance: 767f1f71-6b02-4b88-83bf-dcbf9d87b895] Instance disappeared before build. {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1155.660905] env[68443]: DEBUG oslo_concurrency.lockutils [None req-8f361853-4d2d-4481-bec4-8155163e22e0 tempest-MultipleCreateTestJSON-1060821449 tempest-MultipleCreateTestJSON-1060821449-project-member] Lock "767f1f71-6b02-4b88-83bf-dcbf9d87b895" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 206.198s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1155.662593] env[68443]: DEBUG nova.network.neutron [-] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1155.671658] env[68443]: DEBUG nova.compute.manager [None req-8f361853-4d2d-4481-bec4-8155163e22e0 tempest-MultipleCreateTestJSON-1060821449 tempest-MultipleCreateTestJSON-1060821449-project-member] [instance: 95bd5db1-4fff-45a2-84a0-6dd35c8463a2] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1155.674497] env[68443]: INFO nova.compute.manager [-] [instance: 08a980e1-ca8e-4af3-afbf-bd688e11259f] Took 0.06 seconds to deallocate network for instance. [ 1155.706468] env[68443]: DEBUG nova.compute.manager [None req-8f361853-4d2d-4481-bec4-8155163e22e0 tempest-MultipleCreateTestJSON-1060821449 tempest-MultipleCreateTestJSON-1060821449-project-member] [instance: 95bd5db1-4fff-45a2-84a0-6dd35c8463a2] Instance disappeared before build. 
{{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1155.735860] env[68443]: DEBUG oslo_concurrency.lockutils [None req-8f361853-4d2d-4481-bec4-8155163e22e0 tempest-MultipleCreateTestJSON-1060821449 tempest-MultipleCreateTestJSON-1060821449-project-member] Lock "95bd5db1-4fff-45a2-84a0-6dd35c8463a2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 206.237s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1155.747264] env[68443]: DEBUG nova.compute.manager [None req-76a4bab5-8f17-482f-a227-c2ffcbed392f tempest-ServersTestMultiNic-1218264561 tempest-ServersTestMultiNic-1218264561-project-member] [instance: ff1a64cd-c34a-4907-a40d-ddfbb28dbc7a] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1155.779488] env[68443]: DEBUG nova.compute.manager [None req-76a4bab5-8f17-482f-a227-c2ffcbed392f tempest-ServersTestMultiNic-1218264561 tempest-ServersTestMultiNic-1218264561-project-member] [instance: ff1a64cd-c34a-4907-a40d-ddfbb28dbc7a] Instance disappeared before build. {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1155.790573] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a560b179-4e1b-4505-b452-d8dac37f09f9 tempest-VolumesAssistedSnapshotsTest-889531104 tempest-VolumesAssistedSnapshotsTest-889531104-project-member] Lock "08a980e1-ca8e-4af3-afbf-bd688e11259f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.230s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1155.807599] env[68443]: DEBUG oslo_concurrency.lockutils [None req-76a4bab5-8f17-482f-a227-c2ffcbed392f tempest-ServersTestMultiNic-1218264561 tempest-ServersTestMultiNic-1218264561-project-member] Lock "ff1a64cd-c34a-4907-a40d-ddfbb28dbc7a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 203.438s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1155.817783] env[68443]: DEBUG nova.compute.manager [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Starting instance... 
{{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1155.896269] env[68443]: DEBUG oslo_concurrency.lockutils [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1155.896560] env[68443]: DEBUG oslo_concurrency.lockutils [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1155.898569] env[68443]: INFO nova.compute.claims [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1156.451334] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6122f47-f413-4bcc-81dc-51c6b3a17fe6 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.459134] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53388c38-72e5-4b16-9edd-6f88f8187fe4 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.495079] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01c5db5e-713f-411c-bf5f-c48b3a386d08 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.503401] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67b9cd14-c77f-4f81-951e-668b22e6dcbc {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.517207] env[68443]: DEBUG nova.compute.provider_tree [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1156.525945] env[68443]: DEBUG nova.scheduler.client.report [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1156.548306] env[68443]: DEBUG oslo_concurrency.lockutils 
[None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.651s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1156.548663] env[68443]: DEBUG nova.compute.manager [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Start building networks asynchronously for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1156.607108] env[68443]: DEBUG nova.compute.utils [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Using /dev/sd instead of None {{(pid=68443) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1156.607991] env[68443]: DEBUG nova.compute.manager [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Allocating IP information in the background. {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1156.608070] env[68443]: DEBUG nova.network.neutron [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] allocate_for_instance() {{(pid=68443) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1156.629272] env[68443]: DEBUG nova.compute.manager [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Start building block device mappings for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1156.694847] env[68443]: DEBUG nova.policy [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '267f1cc5982049579842611acbadcb85', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6693fbeca44449939d27838029d25353', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68443) authorize /opt/stack/nova/nova/policy.py:203}} [ 1156.700470] env[68443]: DEBUG nova.compute.manager [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Start spawning the instance on the hypervisor. 
{{(pid=68443) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1156.729546] env[68443]: DEBUG nova.virt.hardware [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-08T18:48:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-08T18:48:12Z,direct_url=,disk_format='vmdk',id=a80a8b97-4d56-4702-9b02-9d115fcd6710,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f4f2a1e220914ec3b281775c224df247',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-08T18:48:13Z,virtual_size=,visibility=), allow threads: False {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1156.729546] env[68443]: DEBUG nova.virt.hardware [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Flavor limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1156.729546] env[68443]: DEBUG nova.virt.hardware [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Image limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1156.729711] env[68443]: DEBUG nova.virt.hardware [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Flavor pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1156.729711] env[68443]: DEBUG nova.virt.hardware [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Image pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1156.729711] env[68443]: DEBUG nova.virt.hardware [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1156.730304] env[68443]: DEBUG nova.virt.hardware [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1156.730609] env[68443]: DEBUG nova.virt.hardware [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1156.730892] env[68443]: DEBUG 
nova.virt.hardware [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Got 1 possible topologies {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1156.731198] env[68443]: DEBUG nova.virt.hardware [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1156.731482] env[68443]: DEBUG nova.virt.hardware [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1156.732451] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eac7b5f5-af3c-45f5-8d58-2e18c62eea21 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.747392] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd42d8af-2261-44e0-8e56-b37e8151609c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.063765] env[68443]: DEBUG nova.network.neutron [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Successfully created port: 3e7b1244-b8a1-45ba-8ce7-1296149808f8 {{(pid=68443) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1157.514936] env[68443]: DEBUG oslo_concurrency.lockutils [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Acquiring lock "76462ea2-393b-4b3a-baf5-e6f3cffa2e5e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1157.515450] env[68443]: DEBUG oslo_concurrency.lockutils [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Lock "76462ea2-393b-4b3a-baf5-e6f3cffa2e5e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1157.667696] env[68443]: DEBUG nova.compute.manager [req-6912a7d5-701d-4d7c-9744-e57887da748d req-9ee83cb7-5b7a-441b-a5e9-cc6489b34835 service nova] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Received event network-vif-plugged-3e7b1244-b8a1-45ba-8ce7-1296149808f8 {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1157.667942] env[68443]: DEBUG oslo_concurrency.lockutils [req-6912a7d5-701d-4d7c-9744-e57887da748d req-9ee83cb7-5b7a-441b-a5e9-cc6489b34835 service nova] Acquiring lock "fd0de9a2-7a54-46be-8b6a-3415366e110c-events" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1157.668175] env[68443]: DEBUG oslo_concurrency.lockutils [req-6912a7d5-701d-4d7c-9744-e57887da748d req-9ee83cb7-5b7a-441b-a5e9-cc6489b34835 service nova] Lock "fd0de9a2-7a54-46be-8b6a-3415366e110c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1157.668438] env[68443]: DEBUG oslo_concurrency.lockutils [req-6912a7d5-701d-4d7c-9744-e57887da748d req-9ee83cb7-5b7a-441b-a5e9-cc6489b34835 service nova] Lock "fd0de9a2-7a54-46be-8b6a-3415366e110c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1157.668554] env[68443]: DEBUG nova.compute.manager [req-6912a7d5-701d-4d7c-9744-e57887da748d req-9ee83cb7-5b7a-441b-a5e9-cc6489b34835 service nova] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] No waiting events found dispatching network-vif-plugged-3e7b1244-b8a1-45ba-8ce7-1296149808f8 {{(pid=68443) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1157.668727] env[68443]: WARNING nova.compute.manager [req-6912a7d5-701d-4d7c-9744-e57887da748d req-9ee83cb7-5b7a-441b-a5e9-cc6489b34835 service nova] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Received unexpected event network-vif-plugged-3e7b1244-b8a1-45ba-8ce7-1296149808f8 for instance with vm_state building and task_state spawning. [ 1157.731114] env[68443]: DEBUG nova.network.neutron [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Successfully updated port: 3e7b1244-b8a1-45ba-8ce7-1296149808f8 {{(pid=68443) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1157.742352] env[68443]: DEBUG oslo_concurrency.lockutils [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Acquiring lock "refresh_cache-fd0de9a2-7a54-46be-8b6a-3415366e110c" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1157.742489] env[68443]: DEBUG oslo_concurrency.lockutils [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Acquired lock "refresh_cache-fd0de9a2-7a54-46be-8b6a-3415366e110c" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1157.742632] env[68443]: DEBUG nova.network.neutron [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Building network info cache for instance {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1157.973050] env[68443]: DEBUG nova.network.neutron [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Instance cache missing network info. 
{{(pid=68443) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1158.142075] env[68443]: DEBUG nova.network.neutron [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Updating instance_info_cache with network_info: [{"id": "3e7b1244-b8a1-45ba-8ce7-1296149808f8", "address": "fa:16:3e:4f:1a:8f", "network": {"id": "37eeb189-6d46-4f68-ab89-f71102bdc722", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-999532111-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6693fbeca44449939d27838029d25353", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e7b1244-b8", "ovs_interfaceid": "3e7b1244-b8a1-45ba-8ce7-1296149808f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1158.157053] env[68443]: DEBUG oslo_concurrency.lockutils [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Releasing lock "refresh_cache-fd0de9a2-7a54-46be-8b6a-3415366e110c" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1158.157053] env[68443]: DEBUG nova.compute.manager [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Instance network_info: |[{"id": "3e7b1244-b8a1-45ba-8ce7-1296149808f8", "address": "fa:16:3e:4f:1a:8f", "network": {"id": "37eeb189-6d46-4f68-ab89-f71102bdc722", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-999532111-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6693fbeca44449939d27838029d25353", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e7b1244-b8", "ovs_interfaceid": "3e7b1244-b8a1-45ba-8ce7-1296149808f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68443) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1987}} [ 1158.157386] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4f:1a:8f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '40c947c4-f471-4d48-8e43-fee54198107e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3e7b1244-b8a1-45ba-8ce7-1296149808f8', 'vif_model': 'vmxnet3'}] {{(pid=68443) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1158.165108] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Creating folder: Project (6693fbeca44449939d27838029d25353). Parent ref: group-v673136. {{(pid=68443) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1158.165770] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-501f9399-2cc1-4126-bbbb-ce49747aac11 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.180032] env[68443]: INFO nova.virt.vmwareapi.vm_util [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Created folder: Project (6693fbeca44449939d27838029d25353) in parent group-v673136. [ 1158.180032] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Creating folder: Instances. Parent ref: group-v673197. {{(pid=68443) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1158.180032] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7dc8a72f-5b66-4e3d-9a58-72db86caf998 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.188291] env[68443]: INFO nova.virt.vmwareapi.vm_util [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Created folder: Instances in parent group-v673197. [ 1158.188680] env[68443]: DEBUG oslo.service.loopingcall [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1158.188959] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Creating VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1158.189278] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5c81899a-14d5-4044-b875-0697de8aa6d3 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.209451] env[68443]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1158.209451] env[68443]: value = "task-3373976" [ 1158.209451] env[68443]: _type = "Task" [ 1158.209451] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1158.218168] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3373976, 'name': CreateVM_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.717632] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3373976, 'name': CreateVM_Task} progress is 25%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.219879] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3373976, 'name': CreateVM_Task, 'duration_secs': 0.94087} completed successfully. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.220058] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Created VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1159.220732] env[68443]: DEBUG oslo_concurrency.lockutils [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1159.220897] env[68443]: DEBUG oslo_concurrency.lockutils [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1159.221233] env[68443]: DEBUG oslo_concurrency.lockutils [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1159.221484] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d88cc4cf-0bee-49a8-808d-eea8905cb438 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.225984] env[68443]: DEBUG oslo_vmware.api [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Waiting for the task: 
(returnval){ [ 1159.225984] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]520cd33e-f409-7fbc-17a4-b4ec41bb3d34" [ 1159.225984] env[68443]: _type = "Task" [ 1159.225984] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.234274] env[68443]: DEBUG oslo_vmware.api [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]520cd33e-f409-7fbc-17a4-b4ec41bb3d34, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.735387] env[68443]: DEBUG oslo_concurrency.lockutils [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1159.735693] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Processing image a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1159.735855] env[68443]: DEBUG oslo_concurrency.lockutils [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1159.963182] env[68443]: DEBUG nova.compute.manager [req-cd2d08bb-3400-4557-b495-55204975e1cd req-0d94fdbe-78fe-4973-afda-069e2b19548a service nova] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Received event network-changed-3e7b1244-b8a1-45ba-8ce7-1296149808f8 {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1159.963398] env[68443]: DEBUG nova.compute.manager [req-cd2d08bb-3400-4557-b495-55204975e1cd req-0d94fdbe-78fe-4973-afda-069e2b19548a service nova] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Refreshing instance network info cache due to event network-changed-3e7b1244-b8a1-45ba-8ce7-1296149808f8. 
{{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1159.963614] env[68443]: DEBUG oslo_concurrency.lockutils [req-cd2d08bb-3400-4557-b495-55204975e1cd req-0d94fdbe-78fe-4973-afda-069e2b19548a service nova] Acquiring lock "refresh_cache-fd0de9a2-7a54-46be-8b6a-3415366e110c" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1159.963757] env[68443]: DEBUG oslo_concurrency.lockutils [req-cd2d08bb-3400-4557-b495-55204975e1cd req-0d94fdbe-78fe-4973-afda-069e2b19548a service nova] Acquired lock "refresh_cache-fd0de9a2-7a54-46be-8b6a-3415366e110c" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1159.963915] env[68443]: DEBUG nova.network.neutron [req-cd2d08bb-3400-4557-b495-55204975e1cd req-0d94fdbe-78fe-4973-afda-069e2b19548a service nova] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Refreshing network info cache for port 3e7b1244-b8a1-45ba-8ce7-1296149808f8 {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1160.021474] env[68443]: DEBUG oslo_concurrency.lockutils [None req-b7951873-8a4a-4e87-9d38-a6460e441320 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Acquiring lock "fd0de9a2-7a54-46be-8b6a-3415366e110c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1160.302356] env[68443]: DEBUG nova.network.neutron [req-cd2d08bb-3400-4557-b495-55204975e1cd req-0d94fdbe-78fe-4973-afda-069e2b19548a service nova] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Updated VIF entry in instance network info cache for port 3e7b1244-b8a1-45ba-8ce7-1296149808f8. 
{{(pid=68443) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1160.302926] env[68443]: DEBUG nova.network.neutron [req-cd2d08bb-3400-4557-b495-55204975e1cd req-0d94fdbe-78fe-4973-afda-069e2b19548a service nova] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Updating instance_info_cache with network_info: [{"id": "3e7b1244-b8a1-45ba-8ce7-1296149808f8", "address": "fa:16:3e:4f:1a:8f", "network": {"id": "37eeb189-6d46-4f68-ab89-f71102bdc722", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-999532111-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6693fbeca44449939d27838029d25353", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e7b1244-b8", "ovs_interfaceid": "3e7b1244-b8a1-45ba-8ce7-1296149808f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1160.312034] env[68443]: DEBUG oslo_concurrency.lockutils [req-cd2d08bb-3400-4557-b495-55204975e1cd req-0d94fdbe-78fe-4973-afda-069e2b19548a service nova] Releasing lock "refresh_cache-fd0de9a2-7a54-46be-8b6a-3415366e110c" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1161.519124] env[68443]: DEBUG oslo_concurrency.lockutils [None req-682b7980-f0e9-4b5b-81c0-d4aabe4302a2 tempest-ServersTestBootFromVolume-1371864501 tempest-ServersTestBootFromVolume-1371864501-project-member] Acquiring lock "bd894d43-4d8f-438b-aea8-29bcb43c77fa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1161.519463] env[68443]: DEBUG oslo_concurrency.lockutils [None req-682b7980-f0e9-4b5b-81c0-d4aabe4302a2 tempest-ServersTestBootFromVolume-1371864501 tempest-ServersTestBootFromVolume-1371864501-project-member] Lock "bd894d43-4d8f-438b-aea8-29bcb43c77fa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1173.399160] env[68443]: DEBUG oslo_concurrency.lockutils [None req-324090ca-523a-4825-a40f-8cb430d6d77e tempest-AttachVolumeNegativeTest-1393756799 tempest-AttachVolumeNegativeTest-1393756799-project-member] Acquiring lock "ccf8bec6-77c7-4208-a808-e0b012c04f98" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1173.399537] env[68443]: DEBUG oslo_concurrency.lockutils [None req-324090ca-523a-4825-a40f-8cb430d6d77e tempest-AttachVolumeNegativeTest-1393756799 
tempest-AttachVolumeNegativeTest-1393756799-project-member] Lock "ccf8bec6-77c7-4208-a808-e0b012c04f98" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1176.605566] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c5c745fc-051f-4a02-a514-acef8638f364 tempest-InstanceActionsV221TestJSON-2003231307 tempest-InstanceActionsV221TestJSON-2003231307-project-member] Acquiring lock "7a989ca4-b091-457b-a9ef-57083a8a285e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1176.605883] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c5c745fc-051f-4a02-a514-acef8638f364 tempest-InstanceActionsV221TestJSON-2003231307 tempest-InstanceActionsV221TestJSON-2003231307-project-member] Lock "7a989ca4-b091-457b-a9ef-57083a8a285e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1194.288939] env[68443]: DEBUG oslo_concurrency.lockutils [None req-0bd38fdc-f800-48b5-813f-04689303a057 tempest-ServersAaction247Test-1630594896 tempest-ServersAaction247Test-1630594896-project-member] Acquiring lock "2a6b143c-5702-4ca4-81c7-8114ecfb441d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1194.289250] env[68443]: DEBUG oslo_concurrency.lockutils [None req-0bd38fdc-f800-48b5-813f-04689303a057 tempest-ServersAaction247Test-1630594896 tempest-ServersAaction247Test-1630594896-project-member] Lock "2a6b143c-5702-4ca4-81c7-8114ecfb441d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1194.731447] env[68443]: DEBUG oslo_concurrency.lockutils [None req-216d17d2-3ee0-45a0-91d7-ae6a8520e9f7 tempest-AttachVolumeTestJSON-1284107966 tempest-AttachVolumeTestJSON-1284107966-project-member] Acquiring lock "196d4d2c-ca4a-47ac-a448-f1caa0fe0854" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1194.731447] env[68443]: DEBUG oslo_concurrency.lockutils [None req-216d17d2-3ee0-45a0-91d7-ae6a8520e9f7 tempest-AttachVolumeTestJSON-1284107966 tempest-AttachVolumeTestJSON-1284107966-project-member] Lock "196d4d2c-ca4a-47ac-a448-f1caa0fe0854" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1200.825590] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1200.825974] env[68443]: DEBUG oslo_service.periodic_task [None 
req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1202.224733] env[68443]: DEBUG oslo_concurrency.lockutils [None req-eb5193c6-d9bd-4e97-9578-b054701fb85e tempest-ServerPasswordTestJSON-1337937232 tempest-ServerPasswordTestJSON-1337937232-project-member] Acquiring lock "b0d16bba-c6f9-4f8e-8fe0-bd3d42ad154c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1202.224981] env[68443]: DEBUG oslo_concurrency.lockutils [None req-eb5193c6-d9bd-4e97-9578-b054701fb85e tempest-ServerPasswordTestJSON-1337937232 tempest-ServerPasswordTestJSON-1337937232-project-member] Lock "b0d16bba-c6f9-4f8e-8fe0-bd3d42ad154c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1202.825186] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1203.011979] env[68443]: WARNING oslo_vmware.rw_handles [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1203.011979] env[68443]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1203.011979] env[68443]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1203.011979] env[68443]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1203.011979] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1203.011979] env[68443]: ERROR oslo_vmware.rw_handles response.begin() [ 1203.011979] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1203.011979] env[68443]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1203.011979] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1203.011979] env[68443]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1203.011979] env[68443]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1203.011979] env[68443]: ERROR oslo_vmware.rw_handles [ 1203.012390] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Downloaded image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to vmware_temp/284c3d5a-a103-4f86-aac4-484c3d81d122/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) fetch_image 
/opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1203.014288] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Caching image {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1203.014529] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Copying Virtual Disk [datastore1] vmware_temp/284c3d5a-a103-4f86-aac4-484c3d81d122/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk to [datastore1] vmware_temp/284c3d5a-a103-4f86-aac4-484c3d81d122/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk {{(pid=68443) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1203.014892] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7888601c-346b-4445-965f-f75124cd1914 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.026679] env[68443]: DEBUG oslo_vmware.api [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Waiting for the task: (returnval){ [ 1203.026679] env[68443]: value = "task-3373978" [ 1203.026679] env[68443]: _type = "Task" [ 1203.026679] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.038063] env[68443]: DEBUG oslo_vmware.api [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Task: {'id': task-3373978, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.536768] env[68443]: DEBUG oslo_vmware.exceptions [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Fault InvalidArgument not matched. 
{{(pid=68443) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1203.537131] env[68443]: DEBUG oslo_concurrency.lockutils [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1203.537616] env[68443]: ERROR nova.compute.manager [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1203.537616] env[68443]: Faults: ['InvalidArgument'] [ 1203.537616] env[68443]: ERROR nova.compute.manager [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Traceback (most recent call last): [ 1203.537616] env[68443]: ERROR nova.compute.manager [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1203.537616] env[68443]: ERROR nova.compute.manager [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] yield resources [ 1203.537616] env[68443]: ERROR nova.compute.manager [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1203.537616] env[68443]: ERROR nova.compute.manager [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] self.driver.spawn(context, instance, image_meta, [ 1203.537616] env[68443]: ERROR nova.compute.manager [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1203.537616] env[68443]: ERROR nova.compute.manager [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1203.537616] env[68443]: ERROR nova.compute.manager [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1203.537616] env[68443]: ERROR nova.compute.manager [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] self._fetch_image_if_missing(context, vi) [ 1203.537616] env[68443]: ERROR nova.compute.manager [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1203.538028] env[68443]: ERROR nova.compute.manager [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] image_cache(vi, tmp_image_ds_loc) [ 1203.538028] env[68443]: ERROR nova.compute.manager [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1203.538028] env[68443]: ERROR nova.compute.manager [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] vm_util.copy_virtual_disk( [ 1203.538028] env[68443]: ERROR nova.compute.manager [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1203.538028] env[68443]: ERROR nova.compute.manager [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] session._wait_for_task(vmdk_copy_task) [ 1203.538028] env[68443]: ERROR nova.compute.manager [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] 
File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1203.538028] env[68443]: ERROR nova.compute.manager [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] return self.wait_for_task(task_ref) [ 1203.538028] env[68443]: ERROR nova.compute.manager [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1203.538028] env[68443]: ERROR nova.compute.manager [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] return evt.wait() [ 1203.538028] env[68443]: ERROR nova.compute.manager [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1203.538028] env[68443]: ERROR nova.compute.manager [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] result = hub.switch() [ 1203.538028] env[68443]: ERROR nova.compute.manager [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1203.538028] env[68443]: ERROR nova.compute.manager [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] return self.greenlet.switch() [ 1203.538405] env[68443]: ERROR nova.compute.manager [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1203.538405] env[68443]: ERROR nova.compute.manager [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] self.f(*self.args, **self.kw) [ 1203.538405] env[68443]: ERROR nova.compute.manager [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1203.538405] env[68443]: ERROR nova.compute.manager [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] raise exceptions.translate_fault(task_info.error) [ 1203.538405] env[68443]: ERROR nova.compute.manager [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1203.538405] env[68443]: ERROR nova.compute.manager [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Faults: ['InvalidArgument'] [ 1203.538405] env[68443]: ERROR nova.compute.manager [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] [ 1203.538405] env[68443]: INFO nova.compute.manager [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Terminating instance [ 1203.539577] env[68443]: DEBUG oslo_concurrency.lockutils [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1203.539795] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1203.540047] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-fd215d91-39bb-4158-ad34-fd26fd3116c2 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.542216] env[68443]: DEBUG nova.compute.manager [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1203.542408] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1203.543125] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb6052f4-c3b2-405b-9a84-3509138f3c83 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.549792] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Unregistering the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1203.550188] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5b2dbf64-108c-4fcc-b497-a1f453c49ea3 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.552263] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1203.552437] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68443) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1203.553394] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-553528ed-cc13-4d43-97a6-e654408cda43 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.557983] env[68443]: DEBUG oslo_vmware.api [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Waiting for the task: (returnval){ [ 1203.557983] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]521f366b-9eb7-d21c-99ef-463a5d82d90f" [ 1203.557983] env[68443]: _type = "Task" [ 1203.557983] env[68443]: } to complete. 
{{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.568824] env[68443]: DEBUG oslo_vmware.api [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]521f366b-9eb7-d21c-99ef-463a5d82d90f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.625448] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Unregistered the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1203.625674] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Deleting contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1203.625900] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Deleting the datastore file [datastore1] 8fdbd88f-f608-4f26-9076-7d2f6eb67224 {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1203.626194] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c2931022-c82c-4625-afb2-b29ee1a5220d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.636031] env[68443]: DEBUG oslo_vmware.api [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Waiting for the task: (returnval){ [ 1203.636031] env[68443]: value = "task-3373980" [ 1203.636031] env[68443]: _type = "Task" [ 1203.636031] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.643395] env[68443]: DEBUG oslo_vmware.api [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Task: {'id': task-3373980, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.825261] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1203.825491] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Starting heal instance info cache {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1203.825636] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Rebuilding the list of instances to heal {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1203.847096] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1203.847255] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 63801b63-1601-4e77-a500-3569713177bd] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1203.847442] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 6df57929-1115-4080-8131-8960525eb833] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1203.847504] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1203.847725] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1203.847725] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1203.847843] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1203.848053] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 3842d98e-d971-456c-b287-53c513285acf] Skipping network cache update for instance because it is Building. 
{{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1203.848236] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1203.848299] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1203.848416] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Didn't find any instances for network info cache update. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1204.069855] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 63801b63-1601-4e77-a500-3569713177bd] Preparing fetch location {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1204.070158] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Creating directory with path [datastore1] vmware_temp/2c9f781a-6aac-4de2-89bd-6644807f50b0/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1204.070393] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-47d04530-4517-4539-bd33-53a019e79d7c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.082050] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Created directory with path [datastore1] vmware_temp/2c9f781a-6aac-4de2-89bd-6644807f50b0/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1204.082050] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 63801b63-1601-4e77-a500-3569713177bd] Fetch image to [datastore1] vmware_temp/2c9f781a-6aac-4de2-89bd-6644807f50b0/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1204.082050] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 63801b63-1601-4e77-a500-3569713177bd] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to [datastore1] vmware_temp/2c9f781a-6aac-4de2-89bd-6644807f50b0/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1204.082769] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-dda90ab0-76e6-4f17-931a-05e82419151b {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.089352] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-584fa268-3cee-4937-96f0-101360f788d7 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.098074] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ac03a02-a681-48be-89bb-61d8b64a6553 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.127921] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-358f0feb-66d9-4aa4-8a2c-84ec7d3b6189 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.133532] env[68443]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-bd860f5c-e5dc-4cb6-9d5a-c66cbd2abbe7 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.143517] env[68443]: DEBUG oslo_vmware.api [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Task: {'id': task-3373980, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.074639} completed successfully. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1204.143752] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Deleted the datastore file {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1204.143989] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Deleted contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1204.144218] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1204.144415] env[68443]: INFO nova.compute.manager [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1204.146604] env[68443]: DEBUG nova.compute.claims [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Aborting claim: {{(pid=68443) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1204.146808] env[68443]: DEBUG oslo_concurrency.lockutils [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1204.147063] env[68443]: DEBUG oslo_concurrency.lockutils [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1204.153349] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 63801b63-1601-4e77-a500-3569713177bd] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1204.203693] env[68443]: DEBUG oslo_vmware.rw_handles [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2c9f781a-6aac-4de2-89bd-6644807f50b0/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68443) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1204.265052] env[68443]: DEBUG oslo_vmware.rw_handles [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Completed reading data from the image iterator. {{(pid=68443) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1204.265284] env[68443]: DEBUG oslo_vmware.rw_handles [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2c9f781a-6aac-4de2-89bd-6644807f50b0/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68443) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1204.565792] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9273d274-cdd4-4af0-9dbe-d72b1a37330b {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.573404] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b7e1c05-c567-4a1f-852d-f4941c28f236 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.604695] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd831268-ca9d-4d73-8b3d-154a6ed1818a {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.612082] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d52b91b3-4867-4176-a0d9-be7e5ebd4a23 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.624749] env[68443]: DEBUG nova.compute.provider_tree [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1204.635095] env[68443]: DEBUG nova.scheduler.client.report [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1204.653437] env[68443]: DEBUG oslo_concurrency.lockutils [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.506s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1204.654010] env[68443]: ERROR nova.compute.manager [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1204.654010] env[68443]: Faults: ['InvalidArgument'] [ 1204.654010] env[68443]: ERROR nova.compute.manager [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Traceback (most recent call last): [ 1204.654010] env[68443]: ERROR nova.compute.manager [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] File 
"/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1204.654010] env[68443]: ERROR nova.compute.manager [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] self.driver.spawn(context, instance, image_meta, [ 1204.654010] env[68443]: ERROR nova.compute.manager [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1204.654010] env[68443]: ERROR nova.compute.manager [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1204.654010] env[68443]: ERROR nova.compute.manager [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1204.654010] env[68443]: ERROR nova.compute.manager [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] self._fetch_image_if_missing(context, vi) [ 1204.654010] env[68443]: ERROR nova.compute.manager [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1204.654010] env[68443]: ERROR nova.compute.manager [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] image_cache(vi, tmp_image_ds_loc) [ 1204.654010] env[68443]: ERROR nova.compute.manager [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1204.654358] env[68443]: ERROR nova.compute.manager [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] vm_util.copy_virtual_disk( [ 1204.654358] env[68443]: ERROR nova.compute.manager [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1204.654358] env[68443]: ERROR nova.compute.manager [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] session._wait_for_task(vmdk_copy_task) [ 1204.654358] env[68443]: ERROR nova.compute.manager [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1204.654358] env[68443]: ERROR nova.compute.manager [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] return self.wait_for_task(task_ref) [ 1204.654358] env[68443]: ERROR nova.compute.manager [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1204.654358] env[68443]: ERROR nova.compute.manager [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] return evt.wait() [ 1204.654358] env[68443]: ERROR nova.compute.manager [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1204.654358] env[68443]: ERROR nova.compute.manager [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] result = hub.switch() [ 1204.654358] env[68443]: ERROR nova.compute.manager [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1204.654358] env[68443]: ERROR nova.compute.manager [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] return self.greenlet.switch() [ 1204.654358] env[68443]: ERROR nova.compute.manager [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1204.654358] env[68443]: ERROR nova.compute.manager [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] self.f(*self.args, **self.kw) [ 1204.654710] 
env[68443]: ERROR nova.compute.manager [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1204.654710] env[68443]: ERROR nova.compute.manager [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] raise exceptions.translate_fault(task_info.error) [ 1204.654710] env[68443]: ERROR nova.compute.manager [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1204.654710] env[68443]: ERROR nova.compute.manager [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Faults: ['InvalidArgument'] [ 1204.654710] env[68443]: ERROR nova.compute.manager [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] [ 1204.654852] env[68443]: DEBUG nova.compute.utils [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] VimFaultException {{(pid=68443) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1204.656663] env[68443]: DEBUG nova.compute.manager [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Build of instance 8fdbd88f-f608-4f26-9076-7d2f6eb67224 was re-scheduled: A specified parameter was not correct: fileType [ 1204.656663] env[68443]: Faults: ['InvalidArgument'] {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1204.657042] env[68443]: DEBUG nova.compute.manager [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Unplugging VIFs for instance {{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1204.657224] env[68443]: DEBUG nova.compute.manager [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1204.657398] env[68443]: DEBUG nova.compute.manager [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1204.657561] env[68443]: DEBUG nova.network.neutron [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1204.976036] env[68443]: DEBUG nova.network.neutron [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1204.990715] env[68443]: INFO nova.compute.manager [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Took 0.33 seconds to deallocate network for instance. [ 1205.084767] env[68443]: INFO nova.scheduler.client.report [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Deleted allocations for instance 8fdbd88f-f608-4f26-9076-7d2f6eb67224 [ 1205.105361] env[68443]: DEBUG oslo_concurrency.lockutils [None req-994eecd1-af74-4706-a8a2-45c9f85235e8 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Lock "8fdbd88f-f608-4f26-9076-7d2f6eb67224" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 474.490s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1205.106598] env[68443]: DEBUG oslo_concurrency.lockutils [None req-15fb12b1-fb94-4529-a2b6-e8b0eb7ef233 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Lock "8fdbd88f-f608-4f26-9076-7d2f6eb67224" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 273.644s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1205.106833] env[68443]: DEBUG oslo_concurrency.lockutils [None req-15fb12b1-fb94-4529-a2b6-e8b0eb7ef233 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Acquiring lock "8fdbd88f-f608-4f26-9076-7d2f6eb67224-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1205.107054] env[68443]: DEBUG oslo_concurrency.lockutils [None req-15fb12b1-fb94-4529-a2b6-e8b0eb7ef233 tempest-ServersWithSpecificFlavorTestJSON-1470859837 
tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Lock "8fdbd88f-f608-4f26-9076-7d2f6eb67224-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1205.107232] env[68443]: DEBUG oslo_concurrency.lockutils [None req-15fb12b1-fb94-4529-a2b6-e8b0eb7ef233 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Lock "8fdbd88f-f608-4f26-9076-7d2f6eb67224-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1205.109310] env[68443]: INFO nova.compute.manager [None req-15fb12b1-fb94-4529-a2b6-e8b0eb7ef233 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Terminating instance [ 1205.111084] env[68443]: DEBUG nova.compute.manager [None req-15fb12b1-fb94-4529-a2b6-e8b0eb7ef233 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1205.111296] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-15fb12b1-fb94-4529-a2b6-e8b0eb7ef233 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1205.111753] env[68443]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-840bb628-bc88-4fb5-be65-60999971436c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.121881] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38680fa1-f5ab-47ce-882e-337c45616d4b {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.132629] env[68443]: DEBUG nova.compute.manager [None req-cbde4992-026f-49d4-92d1-2394747ecb7c tempest-ServersNegativeTestMultiTenantJSON-1168988682 tempest-ServersNegativeTestMultiTenantJSON-1168988682-project-member] [instance: 9ad6308e-05f6-4070-bbda-500e043265f0] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1205.154271] env[68443]: WARNING nova.virt.vmwareapi.vmops [None req-15fb12b1-fb94-4529-a2b6-e8b0eb7ef233 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8fdbd88f-f608-4f26-9076-7d2f6eb67224 could not be found. 
[ 1205.154489] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-15fb12b1-fb94-4529-a2b6-e8b0eb7ef233 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1205.154664] env[68443]: INFO nova.compute.manager [None req-15fb12b1-fb94-4529-a2b6-e8b0eb7ef233 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1205.154906] env[68443]: DEBUG oslo.service.loopingcall [None req-15fb12b1-fb94-4529-a2b6-e8b0eb7ef233 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1205.155154] env[68443]: DEBUG nova.compute.manager [-] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1205.155297] env[68443]: DEBUG nova.network.neutron [-] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1205.157630] env[68443]: DEBUG nova.compute.manager [None req-cbde4992-026f-49d4-92d1-2394747ecb7c tempest-ServersNegativeTestMultiTenantJSON-1168988682 tempest-ServersNegativeTestMultiTenantJSON-1168988682-project-member] [instance: 9ad6308e-05f6-4070-bbda-500e043265f0] Instance disappeared before build. {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1205.178950] env[68443]: DEBUG oslo_concurrency.lockutils [None req-cbde4992-026f-49d4-92d1-2394747ecb7c tempest-ServersNegativeTestMultiTenantJSON-1168988682 tempest-ServersNegativeTestMultiTenantJSON-1168988682-project-member] Lock "9ad6308e-05f6-4070-bbda-500e043265f0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 235.620s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1205.181043] env[68443]: DEBUG nova.network.neutron [-] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1205.188478] env[68443]: INFO nova.compute.manager [-] [instance: 8fdbd88f-f608-4f26-9076-7d2f6eb67224] Took 0.03 seconds to deallocate network for instance. [ 1205.193692] env[68443]: DEBUG nova.compute.manager [None req-a27ab00b-22ac-4ee6-8277-d7287f98d8ed tempest-SecurityGroupsTestJSON-1267966769 tempest-SecurityGroupsTestJSON-1267966769-project-member] [instance: e8c14375-4019-4d0d-9f98-6732a55faa89] Starting instance... 
{{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1205.215774] env[68443]: DEBUG nova.compute.manager [None req-a27ab00b-22ac-4ee6-8277-d7287f98d8ed tempest-SecurityGroupsTestJSON-1267966769 tempest-SecurityGroupsTestJSON-1267966769-project-member] [instance: e8c14375-4019-4d0d-9f98-6732a55faa89] Instance disappeared before build. {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1205.234805] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a27ab00b-22ac-4ee6-8277-d7287f98d8ed tempest-SecurityGroupsTestJSON-1267966769 tempest-SecurityGroupsTestJSON-1267966769-project-member] Lock "e8c14375-4019-4d0d-9f98-6732a55faa89" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 235.296s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1205.243823] env[68443]: DEBUG nova.compute.manager [None req-1711075c-551b-49a9-95bd-3cc1c7c03de9 tempest-AttachVolumeNegativeTest-1393756799 tempest-AttachVolumeNegativeTest-1393756799-project-member] [instance: 88ce1e8e-b0d3-4c98-8efe-c0258ef1b606] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1205.271950] env[68443]: DEBUG nova.compute.manager [None req-1711075c-551b-49a9-95bd-3cc1c7c03de9 tempest-AttachVolumeNegativeTest-1393756799 tempest-AttachVolumeNegativeTest-1393756799-project-member] [instance: 88ce1e8e-b0d3-4c98-8efe-c0258ef1b606] Instance disappeared before build. {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1205.285064] env[68443]: DEBUG oslo_concurrency.lockutils [None req-15fb12b1-fb94-4529-a2b6-e8b0eb7ef233 tempest-ServersWithSpecificFlavorTestJSON-1470859837 tempest-ServersWithSpecificFlavorTestJSON-1470859837-project-member] Lock "8fdbd88f-f608-4f26-9076-7d2f6eb67224" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.178s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1205.293584] env[68443]: DEBUG oslo_concurrency.lockutils [None req-1711075c-551b-49a9-95bd-3cc1c7c03de9 tempest-AttachVolumeNegativeTest-1393756799 tempest-AttachVolumeNegativeTest-1393756799-project-member] Lock "88ce1e8e-b0d3-4c98-8efe-c0258ef1b606" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 233.564s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1205.301865] env[68443]: DEBUG nova.compute.manager [None req-055543e0-11d9-42da-b447-2c86344c5477 tempest-ServersTestFqdnHostnames-23424126 tempest-ServersTestFqdnHostnames-23424126-project-member] [instance: 0591711b-fa55-403d-b8cc-c055b8867214] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1205.324132] env[68443]: DEBUG nova.compute.manager [None req-055543e0-11d9-42da-b447-2c86344c5477 tempest-ServersTestFqdnHostnames-23424126 tempest-ServersTestFqdnHostnames-23424126-project-member] [instance: 0591711b-fa55-403d-b8cc-c055b8867214] Instance disappeared before build. 
{{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1205.342341] env[68443]: DEBUG oslo_concurrency.lockutils [None req-055543e0-11d9-42da-b447-2c86344c5477 tempest-ServersTestFqdnHostnames-23424126 tempest-ServersTestFqdnHostnames-23424126-project-member] Lock "0591711b-fa55-403d-b8cc-c055b8867214" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 225.258s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1205.350653] env[68443]: DEBUG nova.compute.manager [None req-c7b2aef3-e736-46ee-8ce7-e09d3950e5c6 tempest-ServerRescueTestJSONUnderV235-759347534 tempest-ServerRescueTestJSONUnderV235-759347534-project-member] [instance: f3b037d7-2a19-498f-b60e-7ac172411054] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1205.372332] env[68443]: DEBUG nova.compute.manager [None req-c7b2aef3-e736-46ee-8ce7-e09d3950e5c6 tempest-ServerRescueTestJSONUnderV235-759347534 tempest-ServerRescueTestJSONUnderV235-759347534-project-member] [instance: f3b037d7-2a19-498f-b60e-7ac172411054] Instance disappeared before build. {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1205.392865] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c7b2aef3-e736-46ee-8ce7-e09d3950e5c6 tempest-ServerRescueTestJSONUnderV235-759347534 tempest-ServerRescueTestJSONUnderV235-759347534-project-member] Lock "f3b037d7-2a19-498f-b60e-7ac172411054" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 219.742s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1205.403427] env[68443]: DEBUG nova.compute.manager [None req-c8e57b53-27af-4072-bbb7-129551ef0dd1 tempest-ServerMetadataTestJSON-159372343 tempest-ServerMetadataTestJSON-159372343-project-member] [instance: 1f385709-d38c-42bc-b250-e4260f7c8af8] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1205.429210] env[68443]: DEBUG nova.compute.manager [None req-c8e57b53-27af-4072-bbb7-129551ef0dd1 tempest-ServerMetadataTestJSON-159372343 tempest-ServerMetadataTestJSON-159372343-project-member] [instance: 1f385709-d38c-42bc-b250-e4260f7c8af8] Instance disappeared before build. {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1205.453236] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c8e57b53-27af-4072-bbb7-129551ef0dd1 tempest-ServerMetadataTestJSON-159372343 tempest-ServerMetadataTestJSON-159372343-project-member] Lock "1f385709-d38c-42bc-b250-e4260f7c8af8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 216.555s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1205.461940] env[68443]: DEBUG nova.compute.manager [None req-1dc28f3d-a30d-4823-9635-42f4bf0e4d10 tempest-ListServersNegativeTestJSON-1964244822 tempest-ListServersNegativeTestJSON-1964244822-project-member] [instance: aa464980-860e-4c6f-a732-83d75503e4c9] Starting instance... 
{{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1205.484905] env[68443]: DEBUG nova.compute.manager [None req-1dc28f3d-a30d-4823-9635-42f4bf0e4d10 tempest-ListServersNegativeTestJSON-1964244822 tempest-ListServersNegativeTestJSON-1964244822-project-member] [instance: aa464980-860e-4c6f-a732-83d75503e4c9] Instance disappeared before build. {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1205.504682] env[68443]: DEBUG oslo_concurrency.lockutils [None req-1dc28f3d-a30d-4823-9635-42f4bf0e4d10 tempest-ListServersNegativeTestJSON-1964244822 tempest-ListServersNegativeTestJSON-1964244822-project-member] Lock "aa464980-860e-4c6f-a732-83d75503e4c9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 211.075s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1205.513295] env[68443]: DEBUG nova.compute.manager [None req-1dc28f3d-a30d-4823-9635-42f4bf0e4d10 tempest-ListServersNegativeTestJSON-1964244822 tempest-ListServersNegativeTestJSON-1964244822-project-member] [instance: 3ec222f1-9af7-46f8-97ff-27a8f96bd4d8] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1205.538500] env[68443]: DEBUG nova.compute.manager [None req-1dc28f3d-a30d-4823-9635-42f4bf0e4d10 tempest-ListServersNegativeTestJSON-1964244822 tempest-ListServersNegativeTestJSON-1964244822-project-member] [instance: 3ec222f1-9af7-46f8-97ff-27a8f96bd4d8] Instance disappeared before build. {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1205.558531] env[68443]: DEBUG oslo_concurrency.lockutils [None req-1dc28f3d-a30d-4823-9635-42f4bf0e4d10 tempest-ListServersNegativeTestJSON-1964244822 tempest-ListServersNegativeTestJSON-1964244822-project-member] Lock "3ec222f1-9af7-46f8-97ff-27a8f96bd4d8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 211.104s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1205.566859] env[68443]: DEBUG nova.compute.manager [None req-1dc28f3d-a30d-4823-9635-42f4bf0e4d10 tempest-ListServersNegativeTestJSON-1964244822 tempest-ListServersNegativeTestJSON-1964244822-project-member] [instance: 7d79fd13-4514-4fef-b953-4d85af6af40d] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1205.590286] env[68443]: DEBUG nova.compute.manager [None req-1dc28f3d-a30d-4823-9635-42f4bf0e4d10 tempest-ListServersNegativeTestJSON-1964244822 tempest-ListServersNegativeTestJSON-1964244822-project-member] [instance: 7d79fd13-4514-4fef-b953-4d85af6af40d] Instance disappeared before build. 
{{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1205.612803] env[68443]: DEBUG oslo_concurrency.lockutils [None req-1dc28f3d-a30d-4823-9635-42f4bf0e4d10 tempest-ListServersNegativeTestJSON-1964244822 tempest-ListServersNegativeTestJSON-1964244822-project-member] Lock "7d79fd13-4514-4fef-b953-4d85af6af40d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 211.130s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1205.623550] env[68443]: DEBUG nova.compute.manager [None req-8c75c97b-6d9f-407b-a644-d28d29e2e261 tempest-ServerActionsV293TestJSON-2331396 tempest-ServerActionsV293TestJSON-2331396-project-member] [instance: c7488d31-9e3c-42d5-8bdc-51919522d556] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1205.650066] env[68443]: DEBUG nova.compute.manager [None req-8c75c97b-6d9f-407b-a644-d28d29e2e261 tempest-ServerActionsV293TestJSON-2331396 tempest-ServerActionsV293TestJSON-2331396-project-member] [instance: c7488d31-9e3c-42d5-8bdc-51919522d556] Instance disappeared before build. {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1205.677326] env[68443]: DEBUG oslo_concurrency.lockutils [None req-8c75c97b-6d9f-407b-a644-d28d29e2e261 tempest-ServerActionsV293TestJSON-2331396 tempest-ServerActionsV293TestJSON-2331396-project-member] Lock "c7488d31-9e3c-42d5-8bdc-51919522d556" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 204.870s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1205.686927] env[68443]: DEBUG nova.compute.manager [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Starting instance... 
{{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1205.743473] env[68443]: DEBUG oslo_concurrency.lockutils [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1205.743730] env[68443]: DEBUG oslo_concurrency.lockutils [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1205.745309] env[68443]: INFO nova.compute.claims [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1205.826116] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1205.840699] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1206.130048] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2bbc0c5-b94d-4a1a-8c98-597564b45bd1 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.137895] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d88874b-4b3a-438f-9186-9a04f86a2037 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.167195] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2b45ddf-77bd-40ea-b465-2e1d4b22fe03 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.174494] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10fc3124-0d96-44aa-baaf-1e16d74a51aa {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.188302] env[68443]: DEBUG nova.compute.provider_tree [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1206.196617] env[68443]: DEBUG nova.scheduler.client.report [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 
tempest-AttachInterfacesTestJSON-2068282523-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1206.211505] env[68443]: DEBUG oslo_concurrency.lockutils [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.468s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1206.211979] env[68443]: DEBUG nova.compute.manager [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Start building networks asynchronously for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1206.214478] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.374s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1206.214937] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1206.215142] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68443) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1206.216966] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc63ce4c-14e7-41ec-bcb8-910f398c6ad6 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.226855] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caa2e8a4-2954-48d0-a796-85cbdf967967 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.241184] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0919692-bfa9-46b0-b1ac-ed0b36941e66 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.246561] env[68443]: DEBUG nova.compute.utils [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Using /dev/sd instead of None {{(pid=68443) get_next_device_name 
/opt/stack/nova/nova/compute/utils.py:238}} [ 1206.249437] env[68443]: DEBUG nova.compute.manager [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Allocating IP information in the background. {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1206.249608] env[68443]: DEBUG nova.network.neutron [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] allocate_for_instance() {{(pid=68443) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1206.252499] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b62684fe-19f4-4a17-bd15-dd7af1e79fc2 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.256582] env[68443]: DEBUG nova.compute.manager [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Start building block device mappings for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1206.285072] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180990MB free_disk=105GB free_vcpus=48 pci_devices=None {{(pid=68443) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1206.285231] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1206.285427] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1206.311219] env[68443]: DEBUG nova.policy [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8eb39994728c486ab572c6fd7acd1bb1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b9d22d78a3f8410c858ba3f85fb453c3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68443) authorize /opt/stack/nova/nova/policy.py:203}} [ 1206.319808] env[68443]: DEBUG nova.compute.manager [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Start spawning the instance on the hypervisor. 
{{(pid=68443) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1206.349238] env[68443]: DEBUG nova.virt.hardware [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-08T18:48:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-08T18:48:12Z,direct_url=,disk_format='vmdk',id=a80a8b97-4d56-4702-9b02-9d115fcd6710,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f4f2a1e220914ec3b281775c224df247',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-08T18:48:13Z,virtual_size=,visibility=), allow threads: False {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1206.349496] env[68443]: DEBUG nova.virt.hardware [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Flavor limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1206.349662] env[68443]: DEBUG nova.virt.hardware [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Image limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1206.349854] env[68443]: DEBUG nova.virt.hardware [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Flavor pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1206.350028] env[68443]: DEBUG nova.virt.hardware [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Image pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1206.350202] env[68443]: DEBUG nova.virt.hardware [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1206.350408] env[68443]: DEBUG nova.virt.hardware [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1206.350614] env[68443]: DEBUG nova.virt.hardware [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1206.350793] 
env[68443]: DEBUG nova.virt.hardware [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Got 1 possible topologies {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1206.350958] env[68443]: DEBUG nova.virt.hardware [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1206.351144] env[68443]: DEBUG nova.virt.hardware [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1206.352362] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f59d1d56-679c-4320-91c3-a1441a42b93d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.367055] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce22000a-2ce0-4304-84fd-f5179f0a50d6 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.381679] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 63801b63-1601-4e77-a500-3569713177bd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1206.381831] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 6df57929-1115-4080-8131-8960525eb833 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1206.381960] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 844f2b9d-ad2a-431a-a587-65ba446d571f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1206.382098] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance cee290e6-66e3-4d2e-a9bb-f93db33eaaaa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1206.382218] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 1c1acc0d-263d-4687-93ff-291d18a592d8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1206.382335] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1206.382448] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 3842d98e-d971-456c-b287-53c513285acf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1206.382562] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 6e162408-6d3d-42e0-8992-f5843e9e7855 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1206.382671] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance fd0de9a2-7a54-46be-8b6a-3415366e110c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1206.382782] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance bcdc4f46-810d-4ed7-84f1-2db2c318f920 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1206.394356] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 44aa2211-e364-46c6-9cad-a53f5563808b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1206.405464] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 91fd9c10-db96-4366-9548-13b36f94db6b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1206.418971] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 75e3d9b0-4317-4e6e-9f2b-d32134f7223f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1206.432891] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 1473cac8-4f3b-4c4a-ae12-a7e63e37233d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1206.444047] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 44678947-527b-40ea-9919-b1491b6f1be3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1206.456738] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 578460b8-6965-4169-ba8c-a04a189af5be has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1206.468153] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 17f0507a-8889-46e1-bce3-d2d423dc9a7f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1206.478607] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 63e4023e-8a82-4179-8b7f-53801f9bb744 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1206.488614] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 89765b72-d495-4a2a-9b97-e8d7d1d80f49 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1206.498684] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance e232a1e8-c431-4b33-aa45-9de3a337f632 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1206.508555] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1206.519120] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance bd894d43-4d8f-438b-aea8-29bcb43c77fa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1206.528693] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance ccf8bec6-77c7-4208-a808-e0b012c04f98 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1206.538572] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 7a989ca4-b091-457b-a9ef-57083a8a285e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1206.548092] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 2a6b143c-5702-4ca4-81c7-8114ecfb441d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1206.558024] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 196d4d2c-ca4a-47ac-a448-f1caa0fe0854 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1206.568205] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance b0d16bba-c6f9-4f8e-8fe0-bd3d42ad154c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1206.568529] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1206.568529] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1206.686830] env[68443]: DEBUG nova.network.neutron [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Successfully created port: 9ff274e0-27c5-464b-a8f4-5d166dbd662c {{(pid=68443) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1206.932447] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fcb3c5a-8fd0-4f35-9dc1-778446bb9721 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.940055] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a2bc46a-01c1-423d-a917-b1bde98d4662 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.971396] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-947949b4-4deb-4599-88d4-0bc2c13a3a3a {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.978625] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b31873f-e57b-401d-b522-31f2a901055f {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.998345] env[68443]: DEBUG nova.compute.provider_tree [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1207.008914] env[68443]: DEBUG nova.scheduler.client.report [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1207.026938] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68443) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1207.027053] 
env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.742s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1207.334326] env[68443]: DEBUG nova.network.neutron [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Successfully updated port: 9ff274e0-27c5-464b-a8f4-5d166dbd662c {{(pid=68443) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1207.350533] env[68443]: DEBUG oslo_concurrency.lockutils [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Acquiring lock "refresh_cache-bcdc4f46-810d-4ed7-84f1-2db2c318f920" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1207.350666] env[68443]: DEBUG oslo_concurrency.lockutils [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Acquired lock "refresh_cache-bcdc4f46-810d-4ed7-84f1-2db2c318f920" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1207.350807] env[68443]: DEBUG nova.network.neutron [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Building network info cache for instance {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1207.411864] env[68443]: DEBUG nova.network.neutron [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Instance cache missing network info. 
{{(pid=68443) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1207.480675] env[68443]: DEBUG nova.compute.manager [req-31a68350-a805-4cfe-b52b-65879794180c req-81827880-d592-49c6-8357-3949ba0b06c3 service nova] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Received event network-vif-plugged-9ff274e0-27c5-464b-a8f4-5d166dbd662c {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1207.480893] env[68443]: DEBUG oslo_concurrency.lockutils [req-31a68350-a805-4cfe-b52b-65879794180c req-81827880-d592-49c6-8357-3949ba0b06c3 service nova] Acquiring lock "bcdc4f46-810d-4ed7-84f1-2db2c318f920-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1207.481118] env[68443]: DEBUG oslo_concurrency.lockutils [req-31a68350-a805-4cfe-b52b-65879794180c req-81827880-d592-49c6-8357-3949ba0b06c3 service nova] Lock "bcdc4f46-810d-4ed7-84f1-2db2c318f920-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1207.481286] env[68443]: DEBUG oslo_concurrency.lockutils [req-31a68350-a805-4cfe-b52b-65879794180c req-81827880-d592-49c6-8357-3949ba0b06c3 service nova] Lock "bcdc4f46-810d-4ed7-84f1-2db2c318f920-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1207.481449] env[68443]: DEBUG nova.compute.manager [req-31a68350-a805-4cfe-b52b-65879794180c req-81827880-d592-49c6-8357-3949ba0b06c3 service nova] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] No waiting events found dispatching network-vif-plugged-9ff274e0-27c5-464b-a8f4-5d166dbd662c {{(pid=68443) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1207.481611] env[68443]: WARNING nova.compute.manager [req-31a68350-a805-4cfe-b52b-65879794180c req-81827880-d592-49c6-8357-3949ba0b06c3 service nova] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Received unexpected event network-vif-plugged-9ff274e0-27c5-464b-a8f4-5d166dbd662c for instance with vm_state building and task_state spawning. 
[ 1207.610620] env[68443]: DEBUG nova.network.neutron [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Updating instance_info_cache with network_info: [{"id": "9ff274e0-27c5-464b-a8f4-5d166dbd662c", "address": "fa:16:3e:84:24:2b", "network": {"id": "ffd7317a-996b-48eb-8ed3-08829bc8b488", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1411254965-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b9d22d78a3f8410c858ba3f85fb453c3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f4399275-8e92-4448-be9e-d4984e93e89c", "external-id": "nsx-vlan-transportzone-192", "segmentation_id": 192, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9ff274e0-27", "ovs_interfaceid": "9ff274e0-27c5-464b-a8f4-5d166dbd662c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1207.622973] env[68443]: DEBUG oslo_concurrency.lockutils [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Releasing lock "refresh_cache-bcdc4f46-810d-4ed7-84f1-2db2c318f920" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1207.623332] env[68443]: DEBUG nova.compute.manager [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Instance network_info: |[{"id": "9ff274e0-27c5-464b-a8f4-5d166dbd662c", "address": "fa:16:3e:84:24:2b", "network": {"id": "ffd7317a-996b-48eb-8ed3-08829bc8b488", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1411254965-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b9d22d78a3f8410c858ba3f85fb453c3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f4399275-8e92-4448-be9e-d4984e93e89c", "external-id": "nsx-vlan-transportzone-192", "segmentation_id": 192, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9ff274e0-27", "ovs_interfaceid": "9ff274e0-27c5-464b-a8f4-5d166dbd662c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1207.623762] env[68443]: DEBUG 
nova.virt.vmwareapi.vmops [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:84:24:2b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f4399275-8e92-4448-be9e-d4984e93e89c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9ff274e0-27c5-464b-a8f4-5d166dbd662c', 'vif_model': 'vmxnet3'}] {{(pid=68443) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1207.631525] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Creating folder: Project (b9d22d78a3f8410c858ba3f85fb453c3). Parent ref: group-v673136. {{(pid=68443) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1207.632045] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d81c4ab6-7641-43f6-a406-3a794f7c2cbf {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.643524] env[68443]: INFO nova.virt.vmwareapi.vm_util [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Created folder: Project (b9d22d78a3f8410c858ba3f85fb453c3) in parent group-v673136. [ 1207.643710] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Creating folder: Instances. Parent ref: group-v673200. {{(pid=68443) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1207.643928] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dbe9a9f6-5f1f-472b-85fb-d92f67487d43 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.652910] env[68443]: INFO nova.virt.vmwareapi.vm_util [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Created folder: Instances in parent group-v673200. [ 1207.653147] env[68443]: DEBUG oslo.service.loopingcall [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1207.653327] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Creating VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1207.653516] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4954f44c-9906-453f-bbd3-75e29528a3e4 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.671875] env[68443]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1207.671875] env[68443]: value = "task-3373983" [ 1207.671875] env[68443]: _type = "Task" [ 1207.671875] env[68443]: } to complete. 
{{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.680404] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3373983, 'name': CreateVM_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.021008] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1208.021275] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1208.021436] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1208.021578] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68443) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1208.182252] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3373983, 'name': CreateVM_Task} progress is 99%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.681955] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3373983, 'name': CreateVM_Task, 'duration_secs': 0.533302} completed successfully. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1208.682266] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Created VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1208.682836] env[68443]: DEBUG oslo_concurrency.lockutils [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1208.682999] env[68443]: DEBUG oslo_concurrency.lockutils [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1208.683320] env[68443]: DEBUG oslo_concurrency.lockutils [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1208.683564] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a12fa6db-25c0-4331-ab5d-ba60fdf281fa {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.687904] env[68443]: DEBUG oslo_vmware.api [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Waiting for the task: (returnval){ [ 1208.687904] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]520649ea-7696-709f-231b-9b54dd0832c9" [ 1208.687904] env[68443]: _type = "Task" [ 1208.687904] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1208.698649] env[68443]: DEBUG oslo_vmware.api [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]520649ea-7696-709f-231b-9b54dd0832c9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.825848] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1209.204243] env[68443]: DEBUG oslo_concurrency.lockutils [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1209.204562] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Processing image a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1209.205111] env[68443]: DEBUG oslo_concurrency.lockutils [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1209.546021] env[68443]: DEBUG nova.compute.manager [req-2f20de27-378c-4afa-a88c-c6f4d6ffc68c req-a08ff59e-4106-43be-b01e-853550cf45a0 service nova] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Received event network-changed-9ff274e0-27c5-464b-a8f4-5d166dbd662c {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1209.546021] env[68443]: DEBUG nova.compute.manager [req-2f20de27-378c-4afa-a88c-c6f4d6ffc68c req-a08ff59e-4106-43be-b01e-853550cf45a0 service nova] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Refreshing instance network info cache due to event network-changed-9ff274e0-27c5-464b-a8f4-5d166dbd662c. 
{{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1209.546021] env[68443]: DEBUG oslo_concurrency.lockutils [req-2f20de27-378c-4afa-a88c-c6f4d6ffc68c req-a08ff59e-4106-43be-b01e-853550cf45a0 service nova] Acquiring lock "refresh_cache-bcdc4f46-810d-4ed7-84f1-2db2c318f920" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1209.546021] env[68443]: DEBUG oslo_concurrency.lockutils [req-2f20de27-378c-4afa-a88c-c6f4d6ffc68c req-a08ff59e-4106-43be-b01e-853550cf45a0 service nova] Acquired lock "refresh_cache-bcdc4f46-810d-4ed7-84f1-2db2c318f920" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1209.546021] env[68443]: DEBUG nova.network.neutron [req-2f20de27-378c-4afa-a88c-c6f4d6ffc68c req-a08ff59e-4106-43be-b01e-853550cf45a0 service nova] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Refreshing network info cache for port 9ff274e0-27c5-464b-a8f4-5d166dbd662c {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1210.136537] env[68443]: DEBUG nova.network.neutron [req-2f20de27-378c-4afa-a88c-c6f4d6ffc68c req-a08ff59e-4106-43be-b01e-853550cf45a0 service nova] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Updated VIF entry in instance network info cache for port 9ff274e0-27c5-464b-a8f4-5d166dbd662c. {{(pid=68443) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1210.136887] env[68443]: DEBUG nova.network.neutron [req-2f20de27-378c-4afa-a88c-c6f4d6ffc68c req-a08ff59e-4106-43be-b01e-853550cf45a0 service nova] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Updating instance_info_cache with network_info: [{"id": "9ff274e0-27c5-464b-a8f4-5d166dbd662c", "address": "fa:16:3e:84:24:2b", "network": {"id": "ffd7317a-996b-48eb-8ed3-08829bc8b488", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1411254965-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b9d22d78a3f8410c858ba3f85fb453c3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f4399275-8e92-4448-be9e-d4984e93e89c", "external-id": "nsx-vlan-transportzone-192", "segmentation_id": 192, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9ff274e0-27", "ovs_interfaceid": "9ff274e0-27c5-464b-a8f4-5d166dbd662c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1210.147048] env[68443]: DEBUG oslo_concurrency.lockutils [req-2f20de27-378c-4afa-a88c-c6f4d6ffc68c req-a08ff59e-4106-43be-b01e-853550cf45a0 service nova] Releasing lock "refresh_cache-bcdc4f46-810d-4ed7-84f1-2db2c318f920" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1210.722725] env[68443]: DEBUG oslo_concurrency.lockutils [None req-32fc225b-4ecf-4e97-8c6e-0d0b98c8ea59 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] 
Acquiring lock "bcdc4f46-810d-4ed7-84f1-2db2c318f920" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1213.672730] env[68443]: DEBUG oslo_concurrency.lockutils [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Acquiring lock "a4708485-db53-416e-94be-f9a017eb28c4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1213.672995] env[68443]: DEBUG oslo_concurrency.lockutils [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Lock "a4708485-db53-416e-94be-f9a017eb28c4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1253.226120] env[68443]: WARNING oslo_vmware.rw_handles [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1253.226120] env[68443]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1253.226120] env[68443]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1253.226120] env[68443]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1253.226120] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1253.226120] env[68443]: ERROR oslo_vmware.rw_handles response.begin() [ 1253.226120] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1253.226120] env[68443]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1253.226120] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1253.226120] env[68443]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1253.226120] env[68443]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1253.226120] env[68443]: ERROR oslo_vmware.rw_handles [ 1253.226669] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 63801b63-1601-4e77-a500-3569713177bd] Downloaded image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to vmware_temp/2c9f781a-6aac-4de2-89bd-6644807f50b0/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1253.229062] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 63801b63-1601-4e77-a500-3569713177bd] Caching image {{(pid=68443) _fetch_image_if_missing 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1253.229337] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Copying Virtual Disk [datastore1] vmware_temp/2c9f781a-6aac-4de2-89bd-6644807f50b0/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk to [datastore1] vmware_temp/2c9f781a-6aac-4de2-89bd-6644807f50b0/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk {{(pid=68443) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1253.229624] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c828abeb-1cc3-4f54-b09e-64c3d111e03c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.237903] env[68443]: DEBUG oslo_vmware.api [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Waiting for the task: (returnval){ [ 1253.237903] env[68443]: value = "task-3373984" [ 1253.237903] env[68443]: _type = "Task" [ 1253.237903] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.246525] env[68443]: DEBUG oslo_vmware.api [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Task: {'id': task-3373984, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.462581] env[68443]: DEBUG oslo_concurrency.lockutils [None req-90bb6610-682b-45c5-bf6c-7e16e59ac82d tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Acquiring lock "6817aec7-2b56-4a82-ad46-e1957588a8a4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1253.462813] env[68443]: DEBUG oslo_concurrency.lockutils [None req-90bb6610-682b-45c5-bf6c-7e16e59ac82d tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Lock "6817aec7-2b56-4a82-ad46-e1957588a8a4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1253.748438] env[68443]: DEBUG oslo_vmware.exceptions [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Fault InvalidArgument not matched. 
{{(pid=68443) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1253.748715] env[68443]: DEBUG oslo_concurrency.lockutils [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1253.749361] env[68443]: ERROR nova.compute.manager [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 63801b63-1601-4e77-a500-3569713177bd] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1253.749361] env[68443]: Faults: ['InvalidArgument'] [ 1253.749361] env[68443]: ERROR nova.compute.manager [instance: 63801b63-1601-4e77-a500-3569713177bd] Traceback (most recent call last): [ 1253.749361] env[68443]: ERROR nova.compute.manager [instance: 63801b63-1601-4e77-a500-3569713177bd] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1253.749361] env[68443]: ERROR nova.compute.manager [instance: 63801b63-1601-4e77-a500-3569713177bd] yield resources [ 1253.749361] env[68443]: ERROR nova.compute.manager [instance: 63801b63-1601-4e77-a500-3569713177bd] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1253.749361] env[68443]: ERROR nova.compute.manager [instance: 63801b63-1601-4e77-a500-3569713177bd] self.driver.spawn(context, instance, image_meta, [ 1253.749361] env[68443]: ERROR nova.compute.manager [instance: 63801b63-1601-4e77-a500-3569713177bd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1253.749361] env[68443]: ERROR nova.compute.manager [instance: 63801b63-1601-4e77-a500-3569713177bd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1253.749361] env[68443]: ERROR nova.compute.manager [instance: 63801b63-1601-4e77-a500-3569713177bd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1253.749361] env[68443]: ERROR nova.compute.manager [instance: 63801b63-1601-4e77-a500-3569713177bd] self._fetch_image_if_missing(context, vi) [ 1253.749361] env[68443]: ERROR nova.compute.manager [instance: 63801b63-1601-4e77-a500-3569713177bd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1253.749766] env[68443]: ERROR nova.compute.manager [instance: 63801b63-1601-4e77-a500-3569713177bd] image_cache(vi, tmp_image_ds_loc) [ 1253.749766] env[68443]: ERROR nova.compute.manager [instance: 63801b63-1601-4e77-a500-3569713177bd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1253.749766] env[68443]: ERROR nova.compute.manager [instance: 63801b63-1601-4e77-a500-3569713177bd] vm_util.copy_virtual_disk( [ 1253.749766] env[68443]: ERROR nova.compute.manager [instance: 63801b63-1601-4e77-a500-3569713177bd] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1253.749766] env[68443]: ERROR nova.compute.manager [instance: 63801b63-1601-4e77-a500-3569713177bd] session._wait_for_task(vmdk_copy_task) [ 1253.749766] env[68443]: ERROR nova.compute.manager [instance: 63801b63-1601-4e77-a500-3569713177bd] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 1253.749766] env[68443]: ERROR nova.compute.manager [instance: 63801b63-1601-4e77-a500-3569713177bd] return self.wait_for_task(task_ref) [ 1253.749766] env[68443]: ERROR nova.compute.manager [instance: 63801b63-1601-4e77-a500-3569713177bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1253.749766] env[68443]: ERROR nova.compute.manager [instance: 63801b63-1601-4e77-a500-3569713177bd] return evt.wait() [ 1253.749766] env[68443]: ERROR nova.compute.manager [instance: 63801b63-1601-4e77-a500-3569713177bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1253.749766] env[68443]: ERROR nova.compute.manager [instance: 63801b63-1601-4e77-a500-3569713177bd] result = hub.switch() [ 1253.749766] env[68443]: ERROR nova.compute.manager [instance: 63801b63-1601-4e77-a500-3569713177bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1253.749766] env[68443]: ERROR nova.compute.manager [instance: 63801b63-1601-4e77-a500-3569713177bd] return self.greenlet.switch() [ 1253.750160] env[68443]: ERROR nova.compute.manager [instance: 63801b63-1601-4e77-a500-3569713177bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1253.750160] env[68443]: ERROR nova.compute.manager [instance: 63801b63-1601-4e77-a500-3569713177bd] self.f(*self.args, **self.kw) [ 1253.750160] env[68443]: ERROR nova.compute.manager [instance: 63801b63-1601-4e77-a500-3569713177bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1253.750160] env[68443]: ERROR nova.compute.manager [instance: 63801b63-1601-4e77-a500-3569713177bd] raise exceptions.translate_fault(task_info.error) [ 1253.750160] env[68443]: ERROR nova.compute.manager [instance: 63801b63-1601-4e77-a500-3569713177bd] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1253.750160] env[68443]: ERROR nova.compute.manager [instance: 63801b63-1601-4e77-a500-3569713177bd] Faults: ['InvalidArgument'] [ 1253.750160] env[68443]: ERROR nova.compute.manager [instance: 63801b63-1601-4e77-a500-3569713177bd] [ 1253.750160] env[68443]: INFO nova.compute.manager [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 63801b63-1601-4e77-a500-3569713177bd] Terminating instance [ 1253.751236] env[68443]: DEBUG oslo_concurrency.lockutils [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1253.751454] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1253.751710] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d003c871-1984-4232-8234-10f7fa6296bb {{(pid=68443) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.753961] env[68443]: DEBUG nova.compute.manager [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 63801b63-1601-4e77-a500-3569713177bd] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1253.754188] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 63801b63-1601-4e77-a500-3569713177bd] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1253.754921] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43797046-8f05-4e56-9e8f-d1d9dae03b24 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.761904] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 63801b63-1601-4e77-a500-3569713177bd] Unregistering the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1253.762186] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-757b0626-44f6-40eb-9128-d0688f529c12 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.764403] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1253.764547] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68443) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1253.765542] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0da7c04-47a9-4bd7-a018-33a2f21fa4c3 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.770792] env[68443]: DEBUG oslo_vmware.api [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Waiting for the task: (returnval){ [ 1253.770792] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]522a27c7-5326-be06-7794-e90f2ee63fd1" [ 1253.770792] env[68443]: _type = "Task" [ 1253.770792] env[68443]: } to complete. 
{{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.777925] env[68443]: DEBUG oslo_vmware.api [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]522a27c7-5326-be06-7794-e90f2ee63fd1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.839607] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 63801b63-1601-4e77-a500-3569713177bd] Unregistered the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1253.839833] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 63801b63-1601-4e77-a500-3569713177bd] Deleting contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1253.840030] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Deleting the datastore file [datastore1] 63801b63-1601-4e77-a500-3569713177bd {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1253.840311] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1017d9ca-87a8-43f8-9415-7ee4871bb6e4 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.847218] env[68443]: DEBUG oslo_vmware.api [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Waiting for the task: (returnval){ [ 1253.847218] env[68443]: value = "task-3373986" [ 1253.847218] env[68443]: _type = "Task" [ 1253.847218] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.854776] env[68443]: DEBUG oslo_vmware.api [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Task: {'id': task-3373986, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.281208] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] [instance: 6df57929-1115-4080-8131-8960525eb833] Preparing fetch location {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1254.281558] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Creating directory with path [datastore1] vmware_temp/15b9da83-22ba-4e20-9b54-3ce19d1764aa/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1254.281705] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c62b039e-1ccd-403a-b82d-5a2aacf820d7 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.292476] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Created directory with path [datastore1] vmware_temp/15b9da83-22ba-4e20-9b54-3ce19d1764aa/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1254.292656] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] [instance: 6df57929-1115-4080-8131-8960525eb833] Fetch image to [datastore1] vmware_temp/15b9da83-22ba-4e20-9b54-3ce19d1764aa/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1254.292824] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] [instance: 6df57929-1115-4080-8131-8960525eb833] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to [datastore1] vmware_temp/15b9da83-22ba-4e20-9b54-3ce19d1764aa/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1254.293526] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43a3e4ae-efe4-46d9-9352-d70a5b878b1a {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.299723] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49fdfd83-ee11-4e3b-9834-ee59b37f336f {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.308246] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cce61068-ecb9-481b-aaa9-4015f0445b42 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.338598] env[68443]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45225210-34f9-4519-bd91-aeebd06c7e83 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.344325] env[68443]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-deb69b2e-8895-48cd-9941-9dcff0338870 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.355325] env[68443]: DEBUG oslo_vmware.api [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Task: {'id': task-3373986, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.061507} completed successfully. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.355576] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Deleted the datastore file {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1254.355754] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 63801b63-1601-4e77-a500-3569713177bd] Deleted contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1254.355923] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 63801b63-1601-4e77-a500-3569713177bd] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1254.356108] env[68443]: INFO nova.compute.manager [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 63801b63-1601-4e77-a500-3569713177bd] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1254.358210] env[68443]: DEBUG nova.compute.claims [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 63801b63-1601-4e77-a500-3569713177bd] Aborting claim: {{(pid=68443) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1254.358414] env[68443]: DEBUG oslo_concurrency.lockutils [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1254.358645] env[68443]: DEBUG oslo_concurrency.lockutils [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1254.364457] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] [instance: 6df57929-1115-4080-8131-8960525eb833] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1254.415191] env[68443]: DEBUG oslo_vmware.rw_handles [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/15b9da83-22ba-4e20-9b54-3ce19d1764aa/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68443) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1254.476509] env[68443]: DEBUG oslo_vmware.rw_handles [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Completed reading data from the image iterator. {{(pid=68443) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1254.476509] env[68443]: DEBUG oslo_vmware.rw_handles [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/15b9da83-22ba-4e20-9b54-3ce19d1764aa/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68443) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1254.763476] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad460ee9-a987-40b2-8e33-8ec7fc9325fc {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.771094] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-667f985e-2854-40c9-bb7f-272635a47c00 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.800376] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b02a6fe-6785-4e01-a6a5-63efc1d9e6a0 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.808058] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45ed3704-3cd8-4b1a-97ab-5603b005e405 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.820559] env[68443]: DEBUG nova.compute.provider_tree [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1254.829446] env[68443]: DEBUG nova.scheduler.client.report [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1254.847403] env[68443]: DEBUG oslo_concurrency.lockutils [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.488s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1254.847590] env[68443]: ERROR nova.compute.manager [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 63801b63-1601-4e77-a500-3569713177bd] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1254.847590] env[68443]: Faults: ['InvalidArgument'] [ 1254.847590] env[68443]: ERROR nova.compute.manager [instance: 63801b63-1601-4e77-a500-3569713177bd] Traceback (most recent call last): [ 1254.847590] env[68443]: ERROR nova.compute.manager [instance: 63801b63-1601-4e77-a500-3569713177bd] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1254.847590] env[68443]: ERROR nova.compute.manager 
[instance: 63801b63-1601-4e77-a500-3569713177bd] self.driver.spawn(context, instance, image_meta, [ 1254.847590] env[68443]: ERROR nova.compute.manager [instance: 63801b63-1601-4e77-a500-3569713177bd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1254.847590] env[68443]: ERROR nova.compute.manager [instance: 63801b63-1601-4e77-a500-3569713177bd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1254.847590] env[68443]: ERROR nova.compute.manager [instance: 63801b63-1601-4e77-a500-3569713177bd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1254.847590] env[68443]: ERROR nova.compute.manager [instance: 63801b63-1601-4e77-a500-3569713177bd] self._fetch_image_if_missing(context, vi) [ 1254.847590] env[68443]: ERROR nova.compute.manager [instance: 63801b63-1601-4e77-a500-3569713177bd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1254.847590] env[68443]: ERROR nova.compute.manager [instance: 63801b63-1601-4e77-a500-3569713177bd] image_cache(vi, tmp_image_ds_loc) [ 1254.847590] env[68443]: ERROR nova.compute.manager [instance: 63801b63-1601-4e77-a500-3569713177bd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1254.847984] env[68443]: ERROR nova.compute.manager [instance: 63801b63-1601-4e77-a500-3569713177bd] vm_util.copy_virtual_disk( [ 1254.847984] env[68443]: ERROR nova.compute.manager [instance: 63801b63-1601-4e77-a500-3569713177bd] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1254.847984] env[68443]: ERROR nova.compute.manager [instance: 63801b63-1601-4e77-a500-3569713177bd] session._wait_for_task(vmdk_copy_task) [ 1254.847984] env[68443]: ERROR nova.compute.manager [instance: 63801b63-1601-4e77-a500-3569713177bd] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1254.847984] env[68443]: ERROR nova.compute.manager [instance: 63801b63-1601-4e77-a500-3569713177bd] return self.wait_for_task(task_ref) [ 1254.847984] env[68443]: ERROR nova.compute.manager [instance: 63801b63-1601-4e77-a500-3569713177bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1254.847984] env[68443]: ERROR nova.compute.manager [instance: 63801b63-1601-4e77-a500-3569713177bd] return evt.wait() [ 1254.847984] env[68443]: ERROR nova.compute.manager [instance: 63801b63-1601-4e77-a500-3569713177bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1254.847984] env[68443]: ERROR nova.compute.manager [instance: 63801b63-1601-4e77-a500-3569713177bd] result = hub.switch() [ 1254.847984] env[68443]: ERROR nova.compute.manager [instance: 63801b63-1601-4e77-a500-3569713177bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1254.847984] env[68443]: ERROR nova.compute.manager [instance: 63801b63-1601-4e77-a500-3569713177bd] return self.greenlet.switch() [ 1254.847984] env[68443]: ERROR nova.compute.manager [instance: 63801b63-1601-4e77-a500-3569713177bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1254.847984] env[68443]: ERROR nova.compute.manager [instance: 63801b63-1601-4e77-a500-3569713177bd] self.f(*self.args, **self.kw) [ 1254.848337] env[68443]: ERROR nova.compute.manager [instance: 63801b63-1601-4e77-a500-3569713177bd] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1254.848337] env[68443]: ERROR nova.compute.manager [instance: 63801b63-1601-4e77-a500-3569713177bd] raise exceptions.translate_fault(task_info.error) [ 1254.848337] env[68443]: ERROR nova.compute.manager [instance: 63801b63-1601-4e77-a500-3569713177bd] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1254.848337] env[68443]: ERROR nova.compute.manager [instance: 63801b63-1601-4e77-a500-3569713177bd] Faults: ['InvalidArgument'] [ 1254.848337] env[68443]: ERROR nova.compute.manager [instance: 63801b63-1601-4e77-a500-3569713177bd] [ 1254.848337] env[68443]: DEBUG nova.compute.utils [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 63801b63-1601-4e77-a500-3569713177bd] VimFaultException {{(pid=68443) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1254.849774] env[68443]: DEBUG nova.compute.manager [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 63801b63-1601-4e77-a500-3569713177bd] Build of instance 63801b63-1601-4e77-a500-3569713177bd was re-scheduled: A specified parameter was not correct: fileType [ 1254.849774] env[68443]: Faults: ['InvalidArgument'] {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1254.850157] env[68443]: DEBUG nova.compute.manager [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 63801b63-1601-4e77-a500-3569713177bd] Unplugging VIFs for instance {{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1254.850334] env[68443]: DEBUG nova.compute.manager [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1254.850489] env[68443]: DEBUG nova.compute.manager [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 63801b63-1601-4e77-a500-3569713177bd] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1254.850650] env[68443]: DEBUG nova.network.neutron [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 63801b63-1601-4e77-a500-3569713177bd] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1255.165482] env[68443]: DEBUG nova.network.neutron [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 63801b63-1601-4e77-a500-3569713177bd] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1255.179474] env[68443]: INFO nova.compute.manager [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 63801b63-1601-4e77-a500-3569713177bd] Took 0.33 seconds to deallocate network for instance. [ 1255.268849] env[68443]: INFO nova.scheduler.client.report [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Deleted allocations for instance 63801b63-1601-4e77-a500-3569713177bd [ 1255.290960] env[68443]: DEBUG oslo_concurrency.lockutils [None req-5b632b4f-07ec-4fec-9009-28d0a2710d8b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Lock "63801b63-1601-4e77-a500-3569713177bd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 521.943s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1255.292166] env[68443]: DEBUG oslo_concurrency.lockutils [None req-5caaa3d4-4402-4c4c-a4ef-ad56d6da927e tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Lock "63801b63-1601-4e77-a500-3569713177bd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 320.860s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1255.292398] env[68443]: DEBUG oslo_concurrency.lockutils [None req-5caaa3d4-4402-4c4c-a4ef-ad56d6da927e tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Acquiring lock "63801b63-1601-4e77-a500-3569713177bd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1255.292610] env[68443]: DEBUG oslo_concurrency.lockutils [None req-5caaa3d4-4402-4c4c-a4ef-ad56d6da927e tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Lock "63801b63-1601-4e77-a500-3569713177bd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68443) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1255.292789] env[68443]: DEBUG oslo_concurrency.lockutils [None req-5caaa3d4-4402-4c4c-a4ef-ad56d6da927e tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Lock "63801b63-1601-4e77-a500-3569713177bd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1255.294753] env[68443]: INFO nova.compute.manager [None req-5caaa3d4-4402-4c4c-a4ef-ad56d6da927e tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 63801b63-1601-4e77-a500-3569713177bd] Terminating instance [ 1255.296484] env[68443]: DEBUG nova.compute.manager [None req-5caaa3d4-4402-4c4c-a4ef-ad56d6da927e tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 63801b63-1601-4e77-a500-3569713177bd] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1255.296676] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-5caaa3d4-4402-4c4c-a4ef-ad56d6da927e tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 63801b63-1601-4e77-a500-3569713177bd] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1255.297161] env[68443]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ac798b38-d658-4f93-8127-44d1de38a28f {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.302545] env[68443]: DEBUG nova.compute.manager [None req-cf799ce0-fb2d-4254-8183-19dba0e13900 tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] [instance: 44aa2211-e364-46c6-9cad-a53f5563808b] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1255.309070] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-798d95cd-833d-4db8-bf25-def62812ecee {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.337838] env[68443]: WARNING nova.virt.vmwareapi.vmops [None req-5caaa3d4-4402-4c4c-a4ef-ad56d6da927e tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 63801b63-1601-4e77-a500-3569713177bd] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 63801b63-1601-4e77-a500-3569713177bd could not be found. [ 1255.338055] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-5caaa3d4-4402-4c4c-a4ef-ad56d6da927e tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 63801b63-1601-4e77-a500-3569713177bd] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1255.338244] env[68443]: INFO nova.compute.manager [None req-5caaa3d4-4402-4c4c-a4ef-ad56d6da927e tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 63801b63-1601-4e77-a500-3569713177bd] Took 0.04 seconds to destroy the instance on the hypervisor. 
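Note: the build failure traced above originates in the image-cache copy step (nova.virt.vmwareapi.vm_util.copy_virtual_disk waiting on a vCenter CopyVirtualDisk_Task). The following is a minimal sketch, not the actual Nova code path, of how such a VIM fault surfaces through oslo.vmware; `session` is assumed to be an already-created oslo_vmware.api.VMwareAPISession, and the source/destination arguments are placeholders.

from oslo_vmware import exceptions as vexc


def copy_cached_vmdk(session, source_name, source_dc, dest_name):
    # The VirtualDiskManager managed object is exposed on the service content.
    vdm = session.vim.service_content.virtualDiskManager
    # Start the server-side copy; vCenter returns a Task managed object ref.
    task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', vdm,
                              sourceName=source_name,
                              sourceDatacenter=source_dc,
                              destName=dest_name)
    try:
        # wait_for_task polls the task (the _poll_task frame in the traceback)
        # and re-raises any task error as a VimFaultException.
        return session.wait_for_task(task)
    except vexc.VimFaultException as err:
        # In the log above the message is "A specified parameter was not
        # correct: fileType" with err.fault_list == ['InvalidArgument']; the
        # compute manager then aborts the resource claim and reschedules the
        # build, which is what the surrounding entries show.
        raise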
[ 1255.338485] env[68443]: DEBUG oslo.service.loopingcall [None req-5caaa3d4-4402-4c4c-a4ef-ad56d6da927e tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1255.338884] env[68443]: DEBUG nova.compute.manager [None req-cf799ce0-fb2d-4254-8183-19dba0e13900 tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] [instance: 44aa2211-e364-46c6-9cad-a53f5563808b] Instance disappeared before build. {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1255.339730] env[68443]: DEBUG nova.compute.manager [-] [instance: 63801b63-1601-4e77-a500-3569713177bd] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1255.339877] env[68443]: DEBUG nova.network.neutron [-] [instance: 63801b63-1601-4e77-a500-3569713177bd] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1255.361620] env[68443]: DEBUG oslo_concurrency.lockutils [None req-cf799ce0-fb2d-4254-8183-19dba0e13900 tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Lock "44aa2211-e364-46c6-9cad-a53f5563808b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 199.240s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1255.366505] env[68443]: DEBUG nova.network.neutron [-] [instance: 63801b63-1601-4e77-a500-3569713177bd] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1255.372182] env[68443]: DEBUG nova.compute.manager [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1255.374832] env[68443]: INFO nova.compute.manager [-] [instance: 63801b63-1601-4e77-a500-3569713177bd] Took 0.03 seconds to deallocate network for instance. 
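Note: the recurring "Acquiring lock" / "Lock ... acquired" / "Lock ... released" DEBUG entries (lockutils.py:402/407/421) are emitted by the wrapper that oslo.concurrency's synchronized decorator installs around the locked function. A minimal sketch follows, assuming plain oslo_concurrency.lockutils rather than Nova's own synchronized helper; the function name and body are placeholders for illustration only.

from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def claim_resources(instance_uuid, vcpus=1, memory_mb=128):
    # Placeholder body: everything here runs with the process-local
    # "compute_resources" lock held; the log records how long the caller
    # waited to acquire it ("waited N s") and how long it was held ("held N s").
    return {'instance': instance_uuid, 'VCPU': vcpus, 'MEMORY_MB': memory_mb}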
[ 1255.423512] env[68443]: DEBUG oslo_concurrency.lockutils [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1255.423783] env[68443]: DEBUG oslo_concurrency.lockutils [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1255.425274] env[68443]: INFO nova.compute.claims [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1255.460610] env[68443]: DEBUG oslo_concurrency.lockutils [None req-5caaa3d4-4402-4c4c-a4ef-ad56d6da927e tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Lock "63801b63-1601-4e77-a500-3569713177bd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.168s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1255.755604] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-191c5374-143f-4586-b21e-6912c52460b2 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.763270] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a5ad23f-9c3c-4715-bd94-3f0ce19e9d3c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.793143] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-392bf778-47f7-4117-a4a6-46d64526183b {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.800104] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d01487c8-1acf-4e2b-9c92-ede3eb2e9548 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.812698] env[68443]: DEBUG nova.compute.provider_tree [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1255.820779] env[68443]: DEBUG nova.scheduler.client.report [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1255.836011] env[68443]: DEBUG oslo_concurrency.lockutils [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.412s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1255.836464] env[68443]: DEBUG nova.compute.manager [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Start building networks asynchronously for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1255.870902] env[68443]: DEBUG nova.compute.utils [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Using /dev/sd instead of None {{(pid=68443) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1255.874207] env[68443]: DEBUG nova.compute.manager [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Allocating IP information in the background. {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1255.874207] env[68443]: DEBUG nova.network.neutron [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] allocate_for_instance() {{(pid=68443) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1255.881180] env[68443]: DEBUG nova.compute.manager [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Start building block device mappings for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1255.937290] env[68443]: DEBUG nova.policy [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5394eee9936641f986136eee619d6c2b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3d4692d4df3948b98eae443eebb5239b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68443) authorize /opt/stack/nova/nova/policy.py:203}} [ 1255.945419] env[68443]: DEBUG nova.compute.manager [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Start spawning the instance on the hypervisor. 
{{(pid=68443) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1255.971623] env[68443]: DEBUG nova.virt.hardware [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-08T18:48:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-08T18:48:12Z,direct_url=,disk_format='vmdk',id=a80a8b97-4d56-4702-9b02-9d115fcd6710,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f4f2a1e220914ec3b281775c224df247',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-08T18:48:13Z,virtual_size=,visibility=), allow threads: False {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1255.971875] env[68443]: DEBUG nova.virt.hardware [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Flavor limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1255.972044] env[68443]: DEBUG nova.virt.hardware [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Image limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1255.972235] env[68443]: DEBUG nova.virt.hardware [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Flavor pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1255.972384] env[68443]: DEBUG nova.virt.hardware [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Image pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1255.972537] env[68443]: DEBUG nova.virt.hardware [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1255.972733] env[68443]: DEBUG nova.virt.hardware [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1255.972892] env[68443]: DEBUG nova.virt.hardware [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1255.973069] env[68443]: DEBUG nova.virt.hardware [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 
tempest-ServersTestJSON-140288033-project-member] Got 1 possible topologies {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1255.973231] env[68443]: DEBUG nova.virt.hardware [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1255.973401] env[68443]: DEBUG nova.virt.hardware [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1255.974278] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0e71584-422c-41a3-a634-fa6f65f8071d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.982106] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58aa9c95-7768-404e-b20e-347e5e47acae {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.271518] env[68443]: DEBUG nova.network.neutron [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Successfully created port: 34437dae-99cb-42cd-89c5-8ca3bcf4ade5 {{(pid=68443) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1256.850144] env[68443]: DEBUG nova.network.neutron [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Successfully updated port: 34437dae-99cb-42cd-89c5-8ca3bcf4ade5 {{(pid=68443) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1256.862255] env[68443]: DEBUG oslo_concurrency.lockutils [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Acquiring lock "refresh_cache-91fd9c10-db96-4366-9548-13b36f94db6b" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1256.862414] env[68443]: DEBUG oslo_concurrency.lockutils [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Acquired lock "refresh_cache-91fd9c10-db96-4366-9548-13b36f94db6b" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1256.862568] env[68443]: DEBUG nova.network.neutron [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Building network info cache for instance {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1256.905110] env[68443]: DEBUG nova.network.neutron [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Instance cache missing network info. 
{{(pid=68443) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1257.254102] env[68443]: DEBUG nova.network.neutron [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Updating instance_info_cache with network_info: [{"id": "34437dae-99cb-42cd-89c5-8ca3bcf4ade5", "address": "fa:16:3e:eb:c3:f6", "network": {"id": "986011ca-8616-45ae-ad5c-074dc2c4637d", "bridge": "br-int", "label": "tempest-ServersTestJSON-393504262-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d4692d4df3948b98eae443eebb5239b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "095fbf26-7367-4f9e-87c5-2965b64b0b0f", "external-id": "nsx-vlan-transportzone-777", "segmentation_id": 777, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34437dae-99", "ovs_interfaceid": "34437dae-99cb-42cd-89c5-8ca3bcf4ade5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1257.265538] env[68443]: DEBUG oslo_concurrency.lockutils [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Releasing lock "refresh_cache-91fd9c10-db96-4366-9548-13b36f94db6b" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1257.265822] env[68443]: DEBUG nova.compute.manager [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Instance network_info: |[{"id": "34437dae-99cb-42cd-89c5-8ca3bcf4ade5", "address": "fa:16:3e:eb:c3:f6", "network": {"id": "986011ca-8616-45ae-ad5c-074dc2c4637d", "bridge": "br-int", "label": "tempest-ServersTestJSON-393504262-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d4692d4df3948b98eae443eebb5239b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "095fbf26-7367-4f9e-87c5-2965b64b0b0f", "external-id": "nsx-vlan-transportzone-777", "segmentation_id": 777, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34437dae-99", "ovs_interfaceid": "34437dae-99cb-42cd-89c5-8ca3bcf4ade5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1257.266251] env[68443]: DEBUG 
nova.virt.vmwareapi.vmops [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:eb:c3:f6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '095fbf26-7367-4f9e-87c5-2965b64b0b0f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '34437dae-99cb-42cd-89c5-8ca3bcf4ade5', 'vif_model': 'vmxnet3'}] {{(pid=68443) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1257.274084] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Creating folder: Project (3d4692d4df3948b98eae443eebb5239b). Parent ref: group-v673136. {{(pid=68443) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1257.274634] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-467d1f7e-feb0-4cd9-9130-7c46c016b4db {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.289099] env[68443]: INFO nova.virt.vmwareapi.vm_util [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Created folder: Project (3d4692d4df3948b98eae443eebb5239b) in parent group-v673136. [ 1257.289099] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Creating folder: Instances. Parent ref: group-v673203. {{(pid=68443) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1257.289099] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ad7565f6-eb95-47ea-9066-ebd42b1981ac {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.299322] env[68443]: INFO nova.virt.vmwareapi.vm_util [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Created folder: Instances in parent group-v673203. [ 1257.299322] env[68443]: DEBUG oslo.service.loopingcall [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1257.299322] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Creating VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1257.299322] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-80eb969f-cc88-400e-8a0b-5008b46276c4 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.320713] env[68443]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1257.320713] env[68443]: value = "task-3373989" [ 1257.320713] env[68443]: _type = "Task" [ 1257.320713] env[68443]: } to complete. 
{{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1257.326173] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3373989, 'name': CreateVM_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.349462] env[68443]: DEBUG nova.compute.manager [req-0598881e-3de1-498b-bf1b-a266918e7e5b req-36f45c68-1bfb-42a4-bf81-694646ab3510 service nova] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Received event network-vif-plugged-34437dae-99cb-42cd-89c5-8ca3bcf4ade5 {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1257.349602] env[68443]: DEBUG oslo_concurrency.lockutils [req-0598881e-3de1-498b-bf1b-a266918e7e5b req-36f45c68-1bfb-42a4-bf81-694646ab3510 service nova] Acquiring lock "91fd9c10-db96-4366-9548-13b36f94db6b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1257.349800] env[68443]: DEBUG oslo_concurrency.lockutils [req-0598881e-3de1-498b-bf1b-a266918e7e5b req-36f45c68-1bfb-42a4-bf81-694646ab3510 service nova] Lock "91fd9c10-db96-4366-9548-13b36f94db6b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1257.350040] env[68443]: DEBUG oslo_concurrency.lockutils [req-0598881e-3de1-498b-bf1b-a266918e7e5b req-36f45c68-1bfb-42a4-bf81-694646ab3510 service nova] Lock "91fd9c10-db96-4366-9548-13b36f94db6b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1257.350149] env[68443]: DEBUG nova.compute.manager [req-0598881e-3de1-498b-bf1b-a266918e7e5b req-36f45c68-1bfb-42a4-bf81-694646ab3510 service nova] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] No waiting events found dispatching network-vif-plugged-34437dae-99cb-42cd-89c5-8ca3bcf4ade5 {{(pid=68443) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1257.350352] env[68443]: WARNING nova.compute.manager [req-0598881e-3de1-498b-bf1b-a266918e7e5b req-36f45c68-1bfb-42a4-bf81-694646ab3510 service nova] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Received unexpected event network-vif-plugged-34437dae-99cb-42cd-89c5-8ca3bcf4ade5 for instance with vm_state building and task_state spawning. [ 1257.350522] env[68443]: DEBUG nova.compute.manager [req-0598881e-3de1-498b-bf1b-a266918e7e5b req-36f45c68-1bfb-42a4-bf81-694646ab3510 service nova] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Received event network-changed-34437dae-99cb-42cd-89c5-8ca3bcf4ade5 {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1257.350678] env[68443]: DEBUG nova.compute.manager [req-0598881e-3de1-498b-bf1b-a266918e7e5b req-36f45c68-1bfb-42a4-bf81-694646ab3510 service nova] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Refreshing instance network info cache due to event network-changed-34437dae-99cb-42cd-89c5-8ca3bcf4ade5. 
{{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1257.350863] env[68443]: DEBUG oslo_concurrency.lockutils [req-0598881e-3de1-498b-bf1b-a266918e7e5b req-36f45c68-1bfb-42a4-bf81-694646ab3510 service nova] Acquiring lock "refresh_cache-91fd9c10-db96-4366-9548-13b36f94db6b" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1257.350993] env[68443]: DEBUG oslo_concurrency.lockutils [req-0598881e-3de1-498b-bf1b-a266918e7e5b req-36f45c68-1bfb-42a4-bf81-694646ab3510 service nova] Acquired lock "refresh_cache-91fd9c10-db96-4366-9548-13b36f94db6b" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1257.351285] env[68443]: DEBUG nova.network.neutron [req-0598881e-3de1-498b-bf1b-a266918e7e5b req-36f45c68-1bfb-42a4-bf81-694646ab3510 service nova] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Refreshing network info cache for port 34437dae-99cb-42cd-89c5-8ca3bcf4ade5 {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1257.637877] env[68443]: DEBUG nova.network.neutron [req-0598881e-3de1-498b-bf1b-a266918e7e5b req-36f45c68-1bfb-42a4-bf81-694646ab3510 service nova] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Updated VIF entry in instance network info cache for port 34437dae-99cb-42cd-89c5-8ca3bcf4ade5. {{(pid=68443) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1257.638271] env[68443]: DEBUG nova.network.neutron [req-0598881e-3de1-498b-bf1b-a266918e7e5b req-36f45c68-1bfb-42a4-bf81-694646ab3510 service nova] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Updating instance_info_cache with network_info: [{"id": "34437dae-99cb-42cd-89c5-8ca3bcf4ade5", "address": "fa:16:3e:eb:c3:f6", "network": {"id": "986011ca-8616-45ae-ad5c-074dc2c4637d", "bridge": "br-int", "label": "tempest-ServersTestJSON-393504262-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d4692d4df3948b98eae443eebb5239b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "095fbf26-7367-4f9e-87c5-2965b64b0b0f", "external-id": "nsx-vlan-transportzone-777", "segmentation_id": 777, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34437dae-99", "ovs_interfaceid": "34437dae-99cb-42cd-89c5-8ca3bcf4ade5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1257.648400] env[68443]: DEBUG oslo_concurrency.lockutils [req-0598881e-3de1-498b-bf1b-a266918e7e5b req-36f45c68-1bfb-42a4-bf81-694646ab3510 service nova] Releasing lock "refresh_cache-91fd9c10-db96-4366-9548-13b36f94db6b" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1257.829179] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3373989, 'name': CreateVM_Task, 'duration_secs': 0.315194} completed successfully. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1257.829358] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Created VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1257.829965] env[68443]: DEBUG oslo_concurrency.lockutils [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1257.830178] env[68443]: DEBUG oslo_concurrency.lockutils [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1257.830484] env[68443]: DEBUG oslo_concurrency.lockutils [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1257.830719] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-76b36b8f-0253-4835-9bcf-9176aedac619 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.835017] env[68443]: DEBUG oslo_vmware.api [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Waiting for the task: (returnval){ [ 1257.835017] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]52e3ecaa-841e-a33f-de85-1d810b750d2f" [ 1257.835017] env[68443]: _type = "Task" [ 1257.835017] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1257.842277] env[68443]: DEBUG oslo_vmware.api [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]52e3ecaa-841e-a33f-de85-1d810b750d2f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1258.345579] env[68443]: DEBUG oslo_concurrency.lockutils [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1258.345579] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Processing image a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1258.345579] env[68443]: DEBUG oslo_concurrency.lockutils [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1259.825050] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1259.825360] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Cleaning up deleted instances {{(pid=68443) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11219}} [ 1259.838544] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] There are 0 instances to clean {{(pid=68443) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 1260.841963] env[68443]: DEBUG oslo_concurrency.lockutils [None req-1d76cf02-66ee-4c7a-b820-374cb1983ae5 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Acquiring lock "91fd9c10-db96-4366-9548-13b36f94db6b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1261.838276] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1262.825983] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1262.826283] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1264.825636] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task 
ComputeManager._heal_instance_info_cache {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1264.826095] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Starting heal instance info cache {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1264.826269] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Rebuilding the list of instances to heal {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1264.846671] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 6df57929-1115-4080-8131-8960525eb833] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1264.846837] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1264.846971] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1264.849678] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1264.849678] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1264.849678] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 3842d98e-d971-456c-b287-53c513285acf] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1264.849678] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1264.849678] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1264.850017] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Skipping network cache update for instance because it is Building. 
{{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1264.850017] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1264.850017] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Didn't find any instances for network info cache update. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1265.824841] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1265.854891] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1265.855092] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Cleaning up deleted instances with incomplete migration {{(pid=68443) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11257}} [ 1266.837978] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1266.838251] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1266.838399] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68443) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1266.838549] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1266.849833] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1266.850061] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1266.850231] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1266.850379] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68443) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1266.851492] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afafbab1-c444-469c-ae1c-77a5b1d8d4cf {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.860370] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cd81bd9-3104-4a0e-bf16-749cb952aa97 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.875397] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-902f0b5c-e53b-42fd-9b27-0f1560f30227 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.881834] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4c1e0ed-b2db-4001-8841-bed8ec3ef152 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.911297] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181002MB free_disk=105GB free_vcpus=48 pci_devices=None {{(pid=68443) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1266.911475] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1266.911669] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1267.052179] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 6df57929-1115-4080-8131-8960525eb833 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1267.052350] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 844f2b9d-ad2a-431a-a587-65ba446d571f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1267.052482] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance cee290e6-66e3-4d2e-a9bb-f93db33eaaaa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1267.052606] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 1c1acc0d-263d-4687-93ff-291d18a592d8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1267.052727] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1267.052846] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 3842d98e-d971-456c-b287-53c513285acf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1267.052962] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 6e162408-6d3d-42e0-8992-f5843e9e7855 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1267.053093] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance fd0de9a2-7a54-46be-8b6a-3415366e110c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1267.053209] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance bcdc4f46-810d-4ed7-84f1-2db2c318f920 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1267.053322] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 91fd9c10-db96-4366-9548-13b36f94db6b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1267.066111] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 75e3d9b0-4317-4e6e-9f2b-d32134f7223f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1267.076926] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 1473cac8-4f3b-4c4a-ae12-a7e63e37233d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1267.086942] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 44678947-527b-40ea-9919-b1491b6f1be3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1267.098523] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 578460b8-6965-4169-ba8c-a04a189af5be has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1267.112468] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 17f0507a-8889-46e1-bce3-d2d423dc9a7f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1267.122688] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 63e4023e-8a82-4179-8b7f-53801f9bb744 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1267.133194] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 89765b72-d495-4a2a-9b97-e8d7d1d80f49 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1267.801055] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance e232a1e8-c431-4b33-aa45-9de3a337f632 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1267.812157] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1267.822729] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance bd894d43-4d8f-438b-aea8-29bcb43c77fa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1267.832431] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance ccf8bec6-77c7-4208-a808-e0b012c04f98 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1267.842601] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 7a989ca4-b091-457b-a9ef-57083a8a285e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1267.854232] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 2a6b143c-5702-4ca4-81c7-8114ecfb441d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1267.864163] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 196d4d2c-ca4a-47ac-a448-f1caa0fe0854 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1267.874486] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance b0d16bba-c6f9-4f8e-8fe0-bd3d42ad154c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1267.885155] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance a4708485-db53-416e-94be-f9a017eb28c4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1267.899030] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 6817aec7-2b56-4a82-ad46-e1957588a8a4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1267.899030] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1267.899258] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1267.914880] env[68443]: DEBUG nova.scheduler.client.report [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Refreshing inventories for resource provider feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1267.929506] env[68443]: DEBUG nova.scheduler.client.report [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Updating ProviderTree inventory for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1267.929671] env[68443]: DEBUG nova.compute.provider_tree [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Updating inventory in ProviderTree for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1267.940725] env[68443]: DEBUG nova.scheduler.client.report [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Refreshing aggregate associations for resource provider feda0f0b-e324-4b78-af74-5e6cfd355a37, aggregates: None {{(pid=68443) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1267.958730] env[68443]: DEBUG nova.scheduler.client.report [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Refreshing trait associations for resource provider feda0f0b-e324-4b78-af74-5e6cfd355a37, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=68443) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1268.262917] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e010836-3153-4426-9591-09d2277e9cdb {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.270463] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-f0d7da8c-bca9-4226-8f3d-819efc357653 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.299260] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d16f99c-5616-4e57-9853-a8bfa0c2c677 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.305813] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61bcdf86-2836-402e-88f3-83597d2878be {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.318574] env[68443]: DEBUG nova.compute.provider_tree [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1268.326633] env[68443]: DEBUG nova.scheduler.client.report [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1268.340754] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68443) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1268.340926] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.429s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1268.825453] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1268.825723] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1270.833685] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1276.040638] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._sync_power_states {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1276.070276] 
env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Getting list of instances from cluster (obj){ [ 1276.070276] env[68443]: value = "domain-c8" [ 1276.070276] env[68443]: _type = "ClusterComputeResource" [ 1276.070276] env[68443]: } {{(pid=68443) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1276.071641] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-267a44f4-2c0a-49f0-b238-5ac0a265fcc5 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.089784] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Got total of 10 instances {{(pid=68443) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1276.090013] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Triggering sync for uuid 6df57929-1115-4080-8131-8960525eb833 {{(pid=68443) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1276.090167] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Triggering sync for uuid 844f2b9d-ad2a-431a-a587-65ba446d571f {{(pid=68443) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1276.090333] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Triggering sync for uuid cee290e6-66e3-4d2e-a9bb-f93db33eaaaa {{(pid=68443) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1276.090488] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Triggering sync for uuid 1c1acc0d-263d-4687-93ff-291d18a592d8 {{(pid=68443) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1276.090638] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Triggering sync for uuid 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5 {{(pid=68443) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1276.090786] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Triggering sync for uuid 3842d98e-d971-456c-b287-53c513285acf {{(pid=68443) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1276.090934] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Triggering sync for uuid 6e162408-6d3d-42e0-8992-f5843e9e7855 {{(pid=68443) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1276.091125] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Triggering sync for uuid fd0de9a2-7a54-46be-8b6a-3415366e110c {{(pid=68443) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1276.091287] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Triggering sync for uuid bcdc4f46-810d-4ed7-84f1-2db2c318f920 {{(pid=68443) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1276.091434] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Triggering sync for uuid 91fd9c10-db96-4366-9548-13b36f94db6b {{(pid=68443) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1276.091844] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock 
"6df57929-1115-4080-8131-8960525eb833" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1276.092058] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "844f2b9d-ad2a-431a-a587-65ba446d571f" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1276.092282] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "cee290e6-66e3-4d2e-a9bb-f93db33eaaaa" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1276.092472] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "1c1acc0d-263d-4687-93ff-291d18a592d8" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1276.092662] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1276.092852] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "3842d98e-d971-456c-b287-53c513285acf" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1276.093054] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "6e162408-6d3d-42e0-8992-f5843e9e7855" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1276.093250] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "fd0de9a2-7a54-46be-8b6a-3415366e110c" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1276.093437] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "bcdc4f46-810d-4ed7-84f1-2db2c318f920" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1276.093626] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "91fd9c10-db96-4366-9548-13b36f94db6b" by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1302.330125] env[68443]: WARNING oslo_vmware.rw_handles [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1302.330125] env[68443]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1302.330125] env[68443]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1302.330125] env[68443]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1302.330125] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1302.330125] env[68443]: ERROR oslo_vmware.rw_handles response.begin() [ 1302.330125] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1302.330125] env[68443]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1302.330125] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1302.330125] env[68443]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1302.330125] env[68443]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1302.330125] env[68443]: ERROR oslo_vmware.rw_handles [ 1302.330125] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] [instance: 6df57929-1115-4080-8131-8960525eb833] Downloaded image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to vmware_temp/15b9da83-22ba-4e20-9b54-3ce19d1764aa/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1302.331440] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] [instance: 6df57929-1115-4080-8131-8960525eb833] Caching image {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1302.331702] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Copying Virtual Disk [datastore1] vmware_temp/15b9da83-22ba-4e20-9b54-3ce19d1764aa/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk to [datastore1] vmware_temp/15b9da83-22ba-4e20-9b54-3ce19d1764aa/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk {{(pid=68443) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1302.331991] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2ee7630d-08ad-4b2d-b1e1-355a3ef509a9 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.341092] env[68443]: DEBUG 
oslo_vmware.api [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Waiting for the task: (returnval){ [ 1302.341092] env[68443]: value = "task-3373990" [ 1302.341092] env[68443]: _type = "Task" [ 1302.341092] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1302.349719] env[68443]: DEBUG oslo_vmware.api [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Task: {'id': task-3373990, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1302.851576] env[68443]: DEBUG oslo_vmware.exceptions [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Fault InvalidArgument not matched. {{(pid=68443) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1302.851855] env[68443]: DEBUG oslo_concurrency.lockutils [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1302.852412] env[68443]: ERROR nova.compute.manager [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] [instance: 6df57929-1115-4080-8131-8960525eb833] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1302.852412] env[68443]: Faults: ['InvalidArgument'] [ 1302.852412] env[68443]: ERROR nova.compute.manager [instance: 6df57929-1115-4080-8131-8960525eb833] Traceback (most recent call last): [ 1302.852412] env[68443]: ERROR nova.compute.manager [instance: 6df57929-1115-4080-8131-8960525eb833] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1302.852412] env[68443]: ERROR nova.compute.manager [instance: 6df57929-1115-4080-8131-8960525eb833] yield resources [ 1302.852412] env[68443]: ERROR nova.compute.manager [instance: 6df57929-1115-4080-8131-8960525eb833] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1302.852412] env[68443]: ERROR nova.compute.manager [instance: 6df57929-1115-4080-8131-8960525eb833] self.driver.spawn(context, instance, image_meta, [ 1302.852412] env[68443]: ERROR nova.compute.manager [instance: 6df57929-1115-4080-8131-8960525eb833] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1302.852412] env[68443]: ERROR nova.compute.manager [instance: 6df57929-1115-4080-8131-8960525eb833] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1302.852412] env[68443]: ERROR nova.compute.manager [instance: 6df57929-1115-4080-8131-8960525eb833] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1302.852412] env[68443]: ERROR nova.compute.manager [instance: 6df57929-1115-4080-8131-8960525eb833] self._fetch_image_if_missing(context, vi) [ 1302.852412] env[68443]: 
ERROR nova.compute.manager [instance: 6df57929-1115-4080-8131-8960525eb833] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1302.852837] env[68443]: ERROR nova.compute.manager [instance: 6df57929-1115-4080-8131-8960525eb833] image_cache(vi, tmp_image_ds_loc) [ 1302.852837] env[68443]: ERROR nova.compute.manager [instance: 6df57929-1115-4080-8131-8960525eb833] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1302.852837] env[68443]: ERROR nova.compute.manager [instance: 6df57929-1115-4080-8131-8960525eb833] vm_util.copy_virtual_disk( [ 1302.852837] env[68443]: ERROR nova.compute.manager [instance: 6df57929-1115-4080-8131-8960525eb833] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1302.852837] env[68443]: ERROR nova.compute.manager [instance: 6df57929-1115-4080-8131-8960525eb833] session._wait_for_task(vmdk_copy_task) [ 1302.852837] env[68443]: ERROR nova.compute.manager [instance: 6df57929-1115-4080-8131-8960525eb833] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1302.852837] env[68443]: ERROR nova.compute.manager [instance: 6df57929-1115-4080-8131-8960525eb833] return self.wait_for_task(task_ref) [ 1302.852837] env[68443]: ERROR nova.compute.manager [instance: 6df57929-1115-4080-8131-8960525eb833] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1302.852837] env[68443]: ERROR nova.compute.manager [instance: 6df57929-1115-4080-8131-8960525eb833] return evt.wait() [ 1302.852837] env[68443]: ERROR nova.compute.manager [instance: 6df57929-1115-4080-8131-8960525eb833] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1302.852837] env[68443]: ERROR nova.compute.manager [instance: 6df57929-1115-4080-8131-8960525eb833] result = hub.switch() [ 1302.852837] env[68443]: ERROR nova.compute.manager [instance: 6df57929-1115-4080-8131-8960525eb833] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1302.852837] env[68443]: ERROR nova.compute.manager [instance: 6df57929-1115-4080-8131-8960525eb833] return self.greenlet.switch() [ 1302.853257] env[68443]: ERROR nova.compute.manager [instance: 6df57929-1115-4080-8131-8960525eb833] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1302.853257] env[68443]: ERROR nova.compute.manager [instance: 6df57929-1115-4080-8131-8960525eb833] self.f(*self.args, **self.kw) [ 1302.853257] env[68443]: ERROR nova.compute.manager [instance: 6df57929-1115-4080-8131-8960525eb833] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1302.853257] env[68443]: ERROR nova.compute.manager [instance: 6df57929-1115-4080-8131-8960525eb833] raise exceptions.translate_fault(task_info.error) [ 1302.853257] env[68443]: ERROR nova.compute.manager [instance: 6df57929-1115-4080-8131-8960525eb833] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1302.853257] env[68443]: ERROR nova.compute.manager [instance: 6df57929-1115-4080-8131-8960525eb833] Faults: ['InvalidArgument'] [ 1302.853257] env[68443]: ERROR nova.compute.manager [instance: 6df57929-1115-4080-8131-8960525eb833] [ 1302.853257] env[68443]: INFO nova.compute.manager [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 
tempest-AttachInterfacesUnderV243Test-152744245-project-member] [instance: 6df57929-1115-4080-8131-8960525eb833] Terminating instance [ 1302.854313] env[68443]: DEBUG oslo_concurrency.lockutils [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1302.854543] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1302.854779] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-69316162-78df-4b20-b1d1-ded4b3fb9a3c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.857249] env[68443]: DEBUG nova.compute.manager [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] [instance: 6df57929-1115-4080-8131-8960525eb833] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1302.857371] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] [instance: 6df57929-1115-4080-8131-8960525eb833] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1302.857994] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-576d2128-32e9-4abd-a17e-a57e2e2010a5 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.865413] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] [instance: 6df57929-1115-4080-8131-8960525eb833] Unregistering the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1302.866353] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1f2a5fef-d1d1-4932-926a-bad570893989 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.867687] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1302.867862] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68443) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1302.868552] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-397c862a-d9fb-4045-b973-002784e8db18 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.874693] env[68443]: DEBUG oslo_vmware.api [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Waiting for the task: (returnval){ [ 1302.874693] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]5291b666-eaef-a12d-b8fd-3e00954beab5" [ 1302.874693] env[68443]: _type = "Task" [ 1302.874693] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1302.881614] env[68443]: DEBUG oslo_vmware.api [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]5291b666-eaef-a12d-b8fd-3e00954beab5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1302.948379] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] [instance: 6df57929-1115-4080-8131-8960525eb833] Unregistered the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1302.948665] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] [instance: 6df57929-1115-4080-8131-8960525eb833] Deleting contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1302.948665] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Deleting the datastore file [datastore1] 6df57929-1115-4080-8131-8960525eb833 {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1302.948964] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3e24d42b-6336-4c00-90b5-c7f231af518d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.955345] env[68443]: DEBUG oslo_vmware.api [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Waiting for the task: (returnval){ [ 1302.955345] env[68443]: value = "task-3373992" [ 1302.955345] env[68443]: _type = "Task" [ 1302.955345] env[68443]: } to complete. 
{{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1302.963369] env[68443]: DEBUG oslo_vmware.api [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Task: {'id': task-3373992, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1303.385467] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Preparing fetch location {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1303.385773] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Creating directory with path [datastore1] vmware_temp/9006e852-595f-41f5-88c0-45e854ef5a2c/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1303.385773] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-617abd3c-76f8-4ec4-a179-8c4fa158cd4e {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.396903] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Created directory with path [datastore1] vmware_temp/9006e852-595f-41f5-88c0-45e854ef5a2c/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1303.397110] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Fetch image to [datastore1] vmware_temp/9006e852-595f-41f5-88c0-45e854ef5a2c/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1303.397285] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to [datastore1] vmware_temp/9006e852-595f-41f5-88c0-45e854ef5a2c/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1303.397992] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee67facd-0da8-49af-839c-7a6fdd88951f {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.404391] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92eb107a-8125-4b83-8a65-2cc9f5afa28f {{(pid=68443) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.413131] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcf1d927-2393-4c67-b4d7-8f20f448bbb5 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.443680] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff0a7ac1-a5b8-439c-9f6a-712b845b181d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.449625] env[68443]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-4d4b0594-2165-424b-bc39-79fe4c9a8914 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.463974] env[68443]: DEBUG oslo_vmware.api [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Task: {'id': task-3373992, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.062423} completed successfully. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1303.464232] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Deleted the datastore file {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1303.464401] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] [instance: 6df57929-1115-4080-8131-8960525eb833] Deleted contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1303.464573] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] [instance: 6df57929-1115-4080-8131-8960525eb833] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1303.464747] env[68443]: INFO nova.compute.manager [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] [instance: 6df57929-1115-4080-8131-8960525eb833] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 1303.467024] env[68443]: DEBUG nova.compute.claims [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] [instance: 6df57929-1115-4080-8131-8960525eb833] Aborting claim: {{(pid=68443) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1303.467179] env[68443]: DEBUG oslo_concurrency.lockutils [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1303.467409] env[68443]: DEBUG oslo_concurrency.lockutils [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1303.471573] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1303.525103] env[68443]: DEBUG oslo_vmware.rw_handles [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9006e852-595f-41f5-88c0-45e854ef5a2c/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68443) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1303.587207] env[68443]: DEBUG oslo_vmware.rw_handles [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Completed reading data from the image iterator. {{(pid=68443) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1303.587207] env[68443]: DEBUG oslo_vmware.rw_handles [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9006e852-595f-41f5-88c0-45e854ef5a2c/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68443) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1303.884838] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccea7814-58e9-448d-84eb-2bfe8b0aef64 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.893155] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0681eab-e9ea-4cd0-b0c9-0b233b718f78 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.924185] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5532aef6-23f8-45a9-b606-9313e0331f9f {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.932010] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6b98c97-f89f-4b88-a114-aa8da9ce7326 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.944775] env[68443]: DEBUG nova.compute.provider_tree [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1303.953267] env[68443]: DEBUG nova.scheduler.client.report [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1303.969827] env[68443]: DEBUG oslo_concurrency.lockutils [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.502s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1303.970398] env[68443]: ERROR nova.compute.manager [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] [instance: 6df57929-1115-4080-8131-8960525eb833] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1303.970398] env[68443]: Faults: ['InvalidArgument'] [ 1303.970398] env[68443]: ERROR nova.compute.manager [instance: 6df57929-1115-4080-8131-8960525eb833] Traceback (most recent call last): [ 1303.970398] env[68443]: ERROR nova.compute.manager [instance: 6df57929-1115-4080-8131-8960525eb833] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in 
_build_and_run_instance [ 1303.970398] env[68443]: ERROR nova.compute.manager [instance: 6df57929-1115-4080-8131-8960525eb833] self.driver.spawn(context, instance, image_meta, [ 1303.970398] env[68443]: ERROR nova.compute.manager [instance: 6df57929-1115-4080-8131-8960525eb833] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1303.970398] env[68443]: ERROR nova.compute.manager [instance: 6df57929-1115-4080-8131-8960525eb833] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1303.970398] env[68443]: ERROR nova.compute.manager [instance: 6df57929-1115-4080-8131-8960525eb833] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1303.970398] env[68443]: ERROR nova.compute.manager [instance: 6df57929-1115-4080-8131-8960525eb833] self._fetch_image_if_missing(context, vi) [ 1303.970398] env[68443]: ERROR nova.compute.manager [instance: 6df57929-1115-4080-8131-8960525eb833] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1303.970398] env[68443]: ERROR nova.compute.manager [instance: 6df57929-1115-4080-8131-8960525eb833] image_cache(vi, tmp_image_ds_loc) [ 1303.970398] env[68443]: ERROR nova.compute.manager [instance: 6df57929-1115-4080-8131-8960525eb833] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1303.970733] env[68443]: ERROR nova.compute.manager [instance: 6df57929-1115-4080-8131-8960525eb833] vm_util.copy_virtual_disk( [ 1303.970733] env[68443]: ERROR nova.compute.manager [instance: 6df57929-1115-4080-8131-8960525eb833] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1303.970733] env[68443]: ERROR nova.compute.manager [instance: 6df57929-1115-4080-8131-8960525eb833] session._wait_for_task(vmdk_copy_task) [ 1303.970733] env[68443]: ERROR nova.compute.manager [instance: 6df57929-1115-4080-8131-8960525eb833] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1303.970733] env[68443]: ERROR nova.compute.manager [instance: 6df57929-1115-4080-8131-8960525eb833] return self.wait_for_task(task_ref) [ 1303.970733] env[68443]: ERROR nova.compute.manager [instance: 6df57929-1115-4080-8131-8960525eb833] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1303.970733] env[68443]: ERROR nova.compute.manager [instance: 6df57929-1115-4080-8131-8960525eb833] return evt.wait() [ 1303.970733] env[68443]: ERROR nova.compute.manager [instance: 6df57929-1115-4080-8131-8960525eb833] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1303.970733] env[68443]: ERROR nova.compute.manager [instance: 6df57929-1115-4080-8131-8960525eb833] result = hub.switch() [ 1303.970733] env[68443]: ERROR nova.compute.manager [instance: 6df57929-1115-4080-8131-8960525eb833] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1303.970733] env[68443]: ERROR nova.compute.manager [instance: 6df57929-1115-4080-8131-8960525eb833] return self.greenlet.switch() [ 1303.970733] env[68443]: ERROR nova.compute.manager [instance: 6df57929-1115-4080-8131-8960525eb833] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1303.970733] env[68443]: ERROR nova.compute.manager [instance: 6df57929-1115-4080-8131-8960525eb833] self.f(*self.args, **self.kw) [ 1303.971302] env[68443]: ERROR nova.compute.manager [instance: 
6df57929-1115-4080-8131-8960525eb833] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1303.971302] env[68443]: ERROR nova.compute.manager [instance: 6df57929-1115-4080-8131-8960525eb833] raise exceptions.translate_fault(task_info.error) [ 1303.971302] env[68443]: ERROR nova.compute.manager [instance: 6df57929-1115-4080-8131-8960525eb833] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1303.971302] env[68443]: ERROR nova.compute.manager [instance: 6df57929-1115-4080-8131-8960525eb833] Faults: ['InvalidArgument'] [ 1303.971302] env[68443]: ERROR nova.compute.manager [instance: 6df57929-1115-4080-8131-8960525eb833] [ 1303.971302] env[68443]: DEBUG nova.compute.utils [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] [instance: 6df57929-1115-4080-8131-8960525eb833] VimFaultException {{(pid=68443) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1303.972739] env[68443]: DEBUG nova.compute.manager [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] [instance: 6df57929-1115-4080-8131-8960525eb833] Build of instance 6df57929-1115-4080-8131-8960525eb833 was re-scheduled: A specified parameter was not correct: fileType [ 1303.972739] env[68443]: Faults: ['InvalidArgument'] {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1303.973129] env[68443]: DEBUG nova.compute.manager [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] [instance: 6df57929-1115-4080-8131-8960525eb833] Unplugging VIFs for instance {{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1303.973302] env[68443]: DEBUG nova.compute.manager [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1303.973500] env[68443]: DEBUG nova.compute.manager [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] [instance: 6df57929-1115-4080-8131-8960525eb833] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1303.973673] env[68443]: DEBUG nova.network.neutron [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] [instance: 6df57929-1115-4080-8131-8960525eb833] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1304.269550] env[68443]: DEBUG nova.network.neutron [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] [instance: 6df57929-1115-4080-8131-8960525eb833] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1304.279987] env[68443]: INFO nova.compute.manager [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] [instance: 6df57929-1115-4080-8131-8960525eb833] Took 0.31 seconds to deallocate network for instance. [ 1304.373851] env[68443]: INFO nova.scheduler.client.report [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Deleted allocations for instance 6df57929-1115-4080-8131-8960525eb833 [ 1304.395915] env[68443]: DEBUG oslo_concurrency.lockutils [None req-79defe17-2a2e-4b62-ba79-229d35606003 tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Lock "6df57929-1115-4080-8131-8960525eb833" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 564.480s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1304.396982] env[68443]: DEBUG oslo_concurrency.lockutils [None req-6d0af9fa-c961-49ce-be33-ee35fae4cede tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Lock "6df57929-1115-4080-8131-8960525eb833" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 364.442s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1304.397272] env[68443]: DEBUG oslo_concurrency.lockutils [None req-6d0af9fa-c961-49ce-be33-ee35fae4cede tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Acquiring lock "6df57929-1115-4080-8131-8960525eb833-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1304.397418] env[68443]: DEBUG oslo_concurrency.lockutils [None req-6d0af9fa-c961-49ce-be33-ee35fae4cede tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Lock 
"6df57929-1115-4080-8131-8960525eb833-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1304.397609] env[68443]: DEBUG oslo_concurrency.lockutils [None req-6d0af9fa-c961-49ce-be33-ee35fae4cede tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Lock "6df57929-1115-4080-8131-8960525eb833-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1304.399633] env[68443]: INFO nova.compute.manager [None req-6d0af9fa-c961-49ce-be33-ee35fae4cede tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] [instance: 6df57929-1115-4080-8131-8960525eb833] Terminating instance [ 1304.401249] env[68443]: DEBUG nova.compute.manager [None req-6d0af9fa-c961-49ce-be33-ee35fae4cede tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] [instance: 6df57929-1115-4080-8131-8960525eb833] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1304.401441] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-6d0af9fa-c961-49ce-be33-ee35fae4cede tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] [instance: 6df57929-1115-4080-8131-8960525eb833] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1304.401942] env[68443]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-db4840a8-cfee-4f85-b634-93506e935041 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.411909] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aec26e2-5aa1-48d0-bc71-7d27869f10bd {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.423959] env[68443]: DEBUG nova.compute.manager [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1304.446243] env[68443]: WARNING nova.virt.vmwareapi.vmops [None req-6d0af9fa-c961-49ce-be33-ee35fae4cede tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] [instance: 6df57929-1115-4080-8131-8960525eb833] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6df57929-1115-4080-8131-8960525eb833 could not be found. 
[ 1304.446492] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-6d0af9fa-c961-49ce-be33-ee35fae4cede tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] [instance: 6df57929-1115-4080-8131-8960525eb833] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1304.446735] env[68443]: INFO nova.compute.manager [None req-6d0af9fa-c961-49ce-be33-ee35fae4cede tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] [instance: 6df57929-1115-4080-8131-8960525eb833] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1304.446944] env[68443]: DEBUG oslo.service.loopingcall [None req-6d0af9fa-c961-49ce-be33-ee35fae4cede tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1304.447204] env[68443]: DEBUG nova.compute.manager [-] [instance: 6df57929-1115-4080-8131-8960525eb833] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1304.447301] env[68443]: DEBUG nova.network.neutron [-] [instance: 6df57929-1115-4080-8131-8960525eb833] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1304.478250] env[68443]: DEBUG nova.network.neutron [-] [instance: 6df57929-1115-4080-8131-8960525eb833] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1304.486300] env[68443]: DEBUG oslo_concurrency.lockutils [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1304.486548] env[68443]: DEBUG oslo_concurrency.lockutils [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1304.488025] env[68443]: INFO nova.compute.claims [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1304.491184] env[68443]: INFO nova.compute.manager [-] [instance: 6df57929-1115-4080-8131-8960525eb833] Took 0.04 seconds to deallocate network for instance. 
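The recurring 'Acquiring lock "compute_resources" ... acquired ... waited / released ... held' lines in this trace come from oslo.concurrency's lockutils wrapper, which serializes the resource tracker's claim and abort paths and logs how long each critical section waited and was held. A minimal sketch of the same mechanism (the lock name is taken from the log; the guarded function is illustrative and not Nova's implementation):

    # Hedged sketch only: oslo.concurrency serializes a critical section and logs
    # the acquire/held/released timings at DEBUG, as seen in the entries above.
    import logging

    from oslo_concurrency import lockutils

    logging.basicConfig(level=logging.DEBUG)  # make the lockutils DEBUG lines visible

    @lockutils.synchronized('compute_resources')
    def claim_resources(instance_uuid, vcpus, memory_mb):
        # Only one caller at a time runs this body; the decorator's inner wrapper
        # emits the 'Lock ... acquired by ... :: waited' and 'released ... :: held'
        # messages that appear throughout this log.
        print('claiming %d vCPU / %d MB for %s' % (vcpus, memory_mb, instance_uuid))

    claim_resources('75e3d9b0-4317-4e6e-9f2b-d32134f7223f', 1, 128)  # uuid from the entries above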
[ 1304.589221] env[68443]: DEBUG oslo_concurrency.lockutils [None req-6d0af9fa-c961-49ce-be33-ee35fae4cede tempest-AttachInterfacesUnderV243Test-152744245 tempest-AttachInterfacesUnderV243Test-152744245-project-member] Lock "6df57929-1115-4080-8131-8960525eb833" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.192s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1304.590070] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "6df57929-1115-4080-8131-8960525eb833" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 28.498s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1304.590276] env[68443]: INFO nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 6df57929-1115-4080-8131-8960525eb833] During sync_power_state the instance has a pending task (deleting). Skip. [ 1304.590478] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "6df57929-1115-4080-8131-8960525eb833" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1304.856019] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe3212cb-991c-462e-83e6-36fc098b1eb5 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.862189] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74af9836-fdad-4fde-8a13-f356c28cae32 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.895551] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6f6f59a-5878-421b-a229-0d455ce7d0eb {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.902606] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f7f2f6f-0da1-4792-865f-caca1fc9ce5a {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.915205] env[68443]: DEBUG nova.compute.provider_tree [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1304.925870] env[68443]: DEBUG nova.scheduler.client.report [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 
'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1304.939682] env[68443]: DEBUG oslo_concurrency.lockutils [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.453s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1304.940181] env[68443]: DEBUG nova.compute.manager [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Start building networks asynchronously for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1304.971227] env[68443]: DEBUG nova.compute.utils [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Using /dev/sd instead of None {{(pid=68443) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1304.972635] env[68443]: DEBUG nova.compute.manager [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Allocating IP information in the background. {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1304.972806] env[68443]: DEBUG nova.network.neutron [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] allocate_for_instance() {{(pid=68443) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1304.986267] env[68443]: DEBUG nova.compute.manager [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Start building block device mappings for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1305.049765] env[68443]: DEBUG nova.compute.manager [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Start spawning the instance on the hypervisor. 
{{(pid=68443) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1305.053055] env[68443]: DEBUG nova.policy [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '358a0d8837c149089d5fa9df3f72a945', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3a8518dd13164a47b074bf96894acdbf', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68443) authorize /opt/stack/nova/nova/policy.py:203}} [ 1305.074466] env[68443]: DEBUG nova.virt.hardware [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-08T18:57:14Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='ff97dd1b-4a91-44f5-92e8-4cf874e4ffd6',id=37,is_public=True,memory_mb=128,name='tempest-test_resize_flavor_-845132838',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-08T18:48:12Z,direct_url=,disk_format='vmdk',id=a80a8b97-4d56-4702-9b02-9d115fcd6710,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f4f2a1e220914ec3b281775c224df247',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-08T18:48:13Z,virtual_size=,visibility=), allow threads: False {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1305.074700] env[68443]: DEBUG nova.virt.hardware [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Flavor limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1305.074859] env[68443]: DEBUG nova.virt.hardware [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Image limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1305.075237] env[68443]: DEBUG nova.virt.hardware [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Flavor pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1305.075237] env[68443]: DEBUG nova.virt.hardware [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Image pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1305.075353] env[68443]: DEBUG nova.virt.hardware [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1305.075548] 
env[68443]: DEBUG nova.virt.hardware [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1305.075706] env[68443]: DEBUG nova.virt.hardware [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1305.075870] env[68443]: DEBUG nova.virt.hardware [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Got 1 possible topologies {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1305.076318] env[68443]: DEBUG nova.virt.hardware [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1305.076318] env[68443]: DEBUG nova.virt.hardware [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1305.077114] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60e9742d-c9c6-49bf-b6c6-cbf8ee473707 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.084705] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ac20460-de51-4ffd-b24f-7ddfe611eed5 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.341038] env[68443]: DEBUG nova.network.neutron [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Successfully created port: f2238bcc-1b79-4c98-bfd7-fe2c48295d60 {{(pid=68443) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1305.950475] env[68443]: DEBUG nova.network.neutron [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Successfully updated port: f2238bcc-1b79-4c98-bfd7-fe2c48295d60 {{(pid=68443) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1305.963162] env[68443]: DEBUG oslo_concurrency.lockutils [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Acquiring lock "refresh_cache-75e3d9b0-4317-4e6e-9f2b-d32134f7223f" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1305.963317] env[68443]: DEBUG oslo_concurrency.lockutils [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b 
tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Acquired lock "refresh_cache-75e3d9b0-4317-4e6e-9f2b-d32134f7223f" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1305.963468] env[68443]: DEBUG nova.network.neutron [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Building network info cache for instance {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1306.012682] env[68443]: DEBUG nova.network.neutron [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Instance cache missing network info. {{(pid=68443) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1306.220184] env[68443]: DEBUG nova.network.neutron [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Updating instance_info_cache with network_info: [{"id": "f2238bcc-1b79-4c98-bfd7-fe2c48295d60", "address": "fa:16:3e:19:28:4e", "network": {"id": "5875527f-2d57-4a67-a545-9086340a6dfe", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.50", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f4f2a1e220914ec3b281775c224df247", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "089a7624-43ba-4fce-bfc0-63e4bb7f9aeb", "external-id": "nsx-vlan-transportzone-218", "segmentation_id": 218, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2238bcc-1b", "ovs_interfaceid": "f2238bcc-1b79-4c98-bfd7-fe2c48295d60", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1306.231842] env[68443]: DEBUG oslo_concurrency.lockutils [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Releasing lock "refresh_cache-75e3d9b0-4317-4e6e-9f2b-d32134f7223f" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1306.232200] env[68443]: DEBUG nova.compute.manager [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Instance network_info: |[{"id": "f2238bcc-1b79-4c98-bfd7-fe2c48295d60", "address": "fa:16:3e:19:28:4e", "network": {"id": "5875527f-2d57-4a67-a545-9086340a6dfe", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.50", "type": "fixed", "version": 4, 
"meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f4f2a1e220914ec3b281775c224df247", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "089a7624-43ba-4fce-bfc0-63e4bb7f9aeb", "external-id": "nsx-vlan-transportzone-218", "segmentation_id": 218, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2238bcc-1b", "ovs_interfaceid": "f2238bcc-1b79-4c98-bfd7-fe2c48295d60", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1306.232670] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:19:28:4e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '089a7624-43ba-4fce-bfc0-63e4bb7f9aeb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f2238bcc-1b79-4c98-bfd7-fe2c48295d60', 'vif_model': 'vmxnet3'}] {{(pid=68443) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1306.240401] env[68443]: DEBUG oslo.service.loopingcall [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1306.240985] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Creating VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1306.241223] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3fb14a01-2e1c-45e5-b1b1-d8e802d3804b {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.263818] env[68443]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1306.263818] env[68443]: value = "task-3373993" [ 1306.263818] env[68443]: _type = "Task" [ 1306.263818] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1306.274078] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3373993, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1306.360643] env[68443]: DEBUG nova.compute.manager [req-bf36eb43-ada8-4c69-8913-435a7de55a72 req-67a0367c-3a44-40ac-9a74-deb91fa5117e service nova] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Received event network-vif-plugged-f2238bcc-1b79-4c98-bfd7-fe2c48295d60 {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1306.360876] env[68443]: DEBUG oslo_concurrency.lockutils [req-bf36eb43-ada8-4c69-8913-435a7de55a72 req-67a0367c-3a44-40ac-9a74-deb91fa5117e service nova] Acquiring lock "75e3d9b0-4317-4e6e-9f2b-d32134f7223f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1306.361113] env[68443]: DEBUG oslo_concurrency.lockutils [req-bf36eb43-ada8-4c69-8913-435a7de55a72 req-67a0367c-3a44-40ac-9a74-deb91fa5117e service nova] Lock "75e3d9b0-4317-4e6e-9f2b-d32134f7223f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1306.361311] env[68443]: DEBUG oslo_concurrency.lockutils [req-bf36eb43-ada8-4c69-8913-435a7de55a72 req-67a0367c-3a44-40ac-9a74-deb91fa5117e service nova] Lock "75e3d9b0-4317-4e6e-9f2b-d32134f7223f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1306.361432] env[68443]: DEBUG nova.compute.manager [req-bf36eb43-ada8-4c69-8913-435a7de55a72 req-67a0367c-3a44-40ac-9a74-deb91fa5117e service nova] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] No waiting events found dispatching network-vif-plugged-f2238bcc-1b79-4c98-bfd7-fe2c48295d60 {{(pid=68443) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1306.361576] env[68443]: WARNING nova.compute.manager [req-bf36eb43-ada8-4c69-8913-435a7de55a72 req-67a0367c-3a44-40ac-9a74-deb91fa5117e service nova] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Received unexpected event network-vif-plugged-f2238bcc-1b79-4c98-bfd7-fe2c48295d60 for instance with vm_state building and task_state spawning. [ 1306.361743] env[68443]: DEBUG nova.compute.manager [req-bf36eb43-ada8-4c69-8913-435a7de55a72 req-67a0367c-3a44-40ac-9a74-deb91fa5117e service nova] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Received event network-changed-f2238bcc-1b79-4c98-bfd7-fe2c48295d60 {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1306.361879] env[68443]: DEBUG nova.compute.manager [req-bf36eb43-ada8-4c69-8913-435a7de55a72 req-67a0367c-3a44-40ac-9a74-deb91fa5117e service nova] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Refreshing instance network info cache due to event network-changed-f2238bcc-1b79-4c98-bfd7-fe2c48295d60. 
{{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1306.362069] env[68443]: DEBUG oslo_concurrency.lockutils [req-bf36eb43-ada8-4c69-8913-435a7de55a72 req-67a0367c-3a44-40ac-9a74-deb91fa5117e service nova] Acquiring lock "refresh_cache-75e3d9b0-4317-4e6e-9f2b-d32134f7223f" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1306.362207] env[68443]: DEBUG oslo_concurrency.lockutils [req-bf36eb43-ada8-4c69-8913-435a7de55a72 req-67a0367c-3a44-40ac-9a74-deb91fa5117e service nova] Acquired lock "refresh_cache-75e3d9b0-4317-4e6e-9f2b-d32134f7223f" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1306.362386] env[68443]: DEBUG nova.network.neutron [req-bf36eb43-ada8-4c69-8913-435a7de55a72 req-67a0367c-3a44-40ac-9a74-deb91fa5117e service nova] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Refreshing network info cache for port f2238bcc-1b79-4c98-bfd7-fe2c48295d60 {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1306.655185] env[68443]: DEBUG nova.network.neutron [req-bf36eb43-ada8-4c69-8913-435a7de55a72 req-67a0367c-3a44-40ac-9a74-deb91fa5117e service nova] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Updated VIF entry in instance network info cache for port f2238bcc-1b79-4c98-bfd7-fe2c48295d60. {{(pid=68443) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1306.655855] env[68443]: DEBUG nova.network.neutron [req-bf36eb43-ada8-4c69-8913-435a7de55a72 req-67a0367c-3a44-40ac-9a74-deb91fa5117e service nova] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Updating instance_info_cache with network_info: [{"id": "f2238bcc-1b79-4c98-bfd7-fe2c48295d60", "address": "fa:16:3e:19:28:4e", "network": {"id": "5875527f-2d57-4a67-a545-9086340a6dfe", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.50", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f4f2a1e220914ec3b281775c224df247", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "089a7624-43ba-4fce-bfc0-63e4bb7f9aeb", "external-id": "nsx-vlan-transportzone-218", "segmentation_id": 218, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2238bcc-1b", "ovs_interfaceid": "f2238bcc-1b79-4c98-bfd7-fe2c48295d60", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1306.666050] env[68443]: DEBUG oslo_concurrency.lockutils [req-bf36eb43-ada8-4c69-8913-435a7de55a72 req-67a0367c-3a44-40ac-9a74-deb91fa5117e service nova] Releasing lock "refresh_cache-75e3d9b0-4317-4e6e-9f2b-d32134f7223f" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1306.759939] env[68443]: DEBUG oslo_concurrency.lockutils [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Acquiring lock 
"18bae6a1-3bd0-4749-8795-5b8ccd18193f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1306.760171] env[68443]: DEBUG oslo_concurrency.lockutils [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Lock "18bae6a1-3bd0-4749-8795-5b8ccd18193f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1306.773468] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3373993, 'name': CreateVM_Task, 'duration_secs': 0.352184} completed successfully. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1306.773625] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Created VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1306.774241] env[68443]: DEBUG oslo_concurrency.lockutils [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1306.774403] env[68443]: DEBUG oslo_concurrency.lockutils [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1306.774701] env[68443]: DEBUG oslo_concurrency.lockutils [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1306.774930] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d125099a-f1f0-43a6-bb4d-2cde8bc03d54 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.779981] env[68443]: DEBUG oslo_vmware.api [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Waiting for the task: (returnval){ [ 1306.779981] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]52e2e921-4a47-a441-c37e-7799fab8d3de" [ 1306.779981] env[68443]: _type = "Task" [ 1306.779981] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1306.787250] env[68443]: DEBUG oslo_vmware.api [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]52e2e921-4a47-a441-c37e-7799fab8d3de, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.290338] env[68443]: DEBUG oslo_concurrency.lockutils [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1307.290338] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Processing image a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1307.290683] env[68443]: DEBUG oslo_concurrency.lockutils [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1322.878661] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1323.826428] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1323.826651] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1324.826445] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1324.826811] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Starting heal instance info cache {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1324.826811] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Rebuilding the list of instances to heal {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1324.847591] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1324.847766] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Skipping network cache update for instance because it is Building. 
{{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1324.847917] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1324.848214] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1324.848385] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 3842d98e-d971-456c-b287-53c513285acf] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1324.848551] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1324.848702] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1324.848843] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1324.848994] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1324.849173] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1324.849312] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Didn't find any instances for network info cache update. 
{{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1327.824147] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1327.824417] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1327.824522] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68443) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1328.097298] env[68443]: DEBUG oslo_concurrency.lockutils [None req-0ee75a4e-dd68-4812-8cad-94f8bae9d141 tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Acquiring lock "75e3d9b0-4317-4e6e-9f2b-d32134f7223f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1328.824733] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1328.838119] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1328.838407] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1328.838600] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1328.838765] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68443) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1328.840134] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8379b6cd-6fae-4e8e-8f1e-b8499f7d03f1 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.849328] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72f7f1f3-d41c-42f0-8914-5e06e5ec5044 {{(pid=68443) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.864538] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18f6e05c-b9d9-4942-a2be-b438f810c73c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.871099] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb06bf59-c368-473f-85b0-7da4e5f2a535 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.899952] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180966MB free_disk=105GB free_vcpus=48 pci_devices=None {{(pid=68443) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1328.900123] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1328.900323] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1328.974153] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 844f2b9d-ad2a-431a-a587-65ba446d571f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1328.974312] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance cee290e6-66e3-4d2e-a9bb-f93db33eaaaa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1328.974442] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 1c1acc0d-263d-4687-93ff-291d18a592d8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1328.974567] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1328.974688] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 3842d98e-d971-456c-b287-53c513285acf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1328.974805] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 6e162408-6d3d-42e0-8992-f5843e9e7855 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1328.974923] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance fd0de9a2-7a54-46be-8b6a-3415366e110c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1328.975054] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance bcdc4f46-810d-4ed7-84f1-2db2c318f920 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1328.975174] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 91fd9c10-db96-4366-9548-13b36f94db6b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1328.975288] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 75e3d9b0-4317-4e6e-9f2b-d32134f7223f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1328.987322] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 1473cac8-4f3b-4c4a-ae12-a7e63e37233d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1328.999008] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 44678947-527b-40ea-9919-b1491b6f1be3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1329.011540] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 578460b8-6965-4169-ba8c-a04a189af5be has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1329.021613] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 17f0507a-8889-46e1-bce3-d2d423dc9a7f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1329.032189] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 63e4023e-8a82-4179-8b7f-53801f9bb744 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1329.042896] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 89765b72-d495-4a2a-9b97-e8d7d1d80f49 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1329.052616] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance e232a1e8-c431-4b33-aa45-9de3a337f632 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1329.062474] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1329.072101] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance bd894d43-4d8f-438b-aea8-29bcb43c77fa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1329.083550] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance ccf8bec6-77c7-4208-a808-e0b012c04f98 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1329.092708] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 7a989ca4-b091-457b-a9ef-57083a8a285e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1329.102329] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 2a6b143c-5702-4ca4-81c7-8114ecfb441d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1329.111192] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 196d4d2c-ca4a-47ac-a448-f1caa0fe0854 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1329.120341] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance b0d16bba-c6f9-4f8e-8fe0-bd3d42ad154c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1329.130647] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance a4708485-db53-416e-94be-f9a017eb28c4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1329.140374] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 6817aec7-2b56-4a82-ad46-e1957588a8a4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1329.149606] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 18bae6a1-3bd0-4749-8795-5b8ccd18193f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1329.149848] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1329.149995] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1329.518130] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d34602eb-ecf1-4c2e-8f1a-569965e5b8e7 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.525790] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b3b3f12-89f2-427a-8d6a-01a3e03e9c99 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.554500] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e40bbce9-7d26-4c90-b454-a8c617090ae5 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.561258] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eec885c-dfaa-4f9e-815f-8ee5ca44bc5c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.573789] env[68443]: DEBUG nova.compute.provider_tree [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1329.582237] env[68443]: DEBUG nova.scheduler.client.report [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1329.596171] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68443) 
_update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1329.596351] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.696s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1330.592852] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1330.825392] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1337.334119] env[68443]: DEBUG oslo_concurrency.lockutils [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Acquiring lock "99b16cd5-beb0-4f71-8011-411b84ddf497" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1337.334119] env[68443]: DEBUG oslo_concurrency.lockutils [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Lock "99b16cd5-beb0-4f71-8011-411b84ddf497" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1339.444838] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Acquiring lock "0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1339.445156] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Lock "0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1346.355093] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c8575d97-1541-4496-a77b-a66c3e6b7fc4 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Acquiring lock "04efeff0-d708-4ab6-bd7a-b438bf28c1d7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1346.355402] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c8575d97-1541-4496-a77b-a66c3e6b7fc4 tempest-VolumesAdminNegativeTest-1014115721 
tempest-VolumesAdminNegativeTest-1014115721-project-member] Lock "04efeff0-d708-4ab6-bd7a-b438bf28c1d7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1348.839984] env[68443]: WARNING oslo_vmware.rw_handles [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1348.839984] env[68443]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1348.839984] env[68443]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1348.839984] env[68443]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1348.839984] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1348.839984] env[68443]: ERROR oslo_vmware.rw_handles response.begin() [ 1348.839984] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1348.839984] env[68443]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1348.839984] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1348.839984] env[68443]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1348.839984] env[68443]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1348.839984] env[68443]: ERROR oslo_vmware.rw_handles [ 1348.840870] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Downloaded image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to vmware_temp/9006e852-595f-41f5-88c0-45e854ef5a2c/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1348.843119] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Caching image {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1348.843383] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Copying Virtual Disk [datastore1] vmware_temp/9006e852-595f-41f5-88c0-45e854ef5a2c/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk to [datastore1] vmware_temp/9006e852-595f-41f5-88c0-45e854ef5a2c/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk {{(pid=68443) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1348.843676] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2417d2db-fdb8-4d19-8cf0-a55f228e1146 {{(pid=68443) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.853830] env[68443]: DEBUG oslo_vmware.api [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Waiting for the task: (returnval){ [ 1348.853830] env[68443]: value = "task-3373994" [ 1348.853830] env[68443]: _type = "Task" [ 1348.853830] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1348.862182] env[68443]: DEBUG oslo_vmware.api [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Task: {'id': task-3373994, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.364633] env[68443]: DEBUG oslo_vmware.exceptions [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Fault InvalidArgument not matched. {{(pid=68443) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1349.364929] env[68443]: DEBUG oslo_concurrency.lockutils [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1349.365511] env[68443]: ERROR nova.compute.manager [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1349.365511] env[68443]: Faults: ['InvalidArgument'] [ 1349.365511] env[68443]: ERROR nova.compute.manager [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Traceback (most recent call last): [ 1349.365511] env[68443]: ERROR nova.compute.manager [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1349.365511] env[68443]: ERROR nova.compute.manager [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] yield resources [ 1349.365511] env[68443]: ERROR nova.compute.manager [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1349.365511] env[68443]: ERROR nova.compute.manager [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] self.driver.spawn(context, instance, image_meta, [ 1349.365511] env[68443]: ERROR nova.compute.manager [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1349.365511] env[68443]: ERROR nova.compute.manager [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1349.365511] env[68443]: ERROR nova.compute.manager [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1349.365511] env[68443]: ERROR 
nova.compute.manager [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] self._fetch_image_if_missing(context, vi) [ 1349.365511] env[68443]: ERROR nova.compute.manager [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1349.366117] env[68443]: ERROR nova.compute.manager [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] image_cache(vi, tmp_image_ds_loc) [ 1349.366117] env[68443]: ERROR nova.compute.manager [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1349.366117] env[68443]: ERROR nova.compute.manager [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] vm_util.copy_virtual_disk( [ 1349.366117] env[68443]: ERROR nova.compute.manager [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1349.366117] env[68443]: ERROR nova.compute.manager [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] session._wait_for_task(vmdk_copy_task) [ 1349.366117] env[68443]: ERROR nova.compute.manager [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1349.366117] env[68443]: ERROR nova.compute.manager [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] return self.wait_for_task(task_ref) [ 1349.366117] env[68443]: ERROR nova.compute.manager [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1349.366117] env[68443]: ERROR nova.compute.manager [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] return evt.wait() [ 1349.366117] env[68443]: ERROR nova.compute.manager [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1349.366117] env[68443]: ERROR nova.compute.manager [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] result = hub.switch() [ 1349.366117] env[68443]: ERROR nova.compute.manager [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1349.366117] env[68443]: ERROR nova.compute.manager [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] return self.greenlet.switch() [ 1349.366595] env[68443]: ERROR nova.compute.manager [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1349.366595] env[68443]: ERROR nova.compute.manager [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] self.f(*self.args, **self.kw) [ 1349.366595] env[68443]: ERROR nova.compute.manager [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1349.366595] env[68443]: ERROR nova.compute.manager [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] raise exceptions.translate_fault(task_info.error) [ 1349.366595] env[68443]: ERROR nova.compute.manager [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1349.366595] env[68443]: ERROR nova.compute.manager [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Faults: ['InvalidArgument'] [ 1349.366595] env[68443]: ERROR nova.compute.manager [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] [ 1349.366595] 
env[68443]: INFO nova.compute.manager [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Terminating instance [ 1349.367697] env[68443]: DEBUG oslo_concurrency.lockutils [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1349.367923] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1349.368575] env[68443]: DEBUG nova.compute.manager [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1349.368759] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1349.369097] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ed93ae4c-4abf-45ab-ac16-359d04fd5038 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.371695] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-755398c6-b255-426e-ba60-a9be8e76d033 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.378918] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Unregistering the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1349.379160] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-44320936-8253-4553-b986-98e9d40cea39 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.381383] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1349.381558] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Folder [datastore1] 
devstack-image-cache_base created. {{(pid=68443) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1349.382812] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-74c45c3b-7fcf-480c-bd5a-571eb30377ce {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.388103] env[68443]: DEBUG oslo_vmware.api [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Waiting for the task: (returnval){ [ 1349.388103] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]5231a2db-14ac-c24f-cbb3-0186f672c0a7" [ 1349.388103] env[68443]: _type = "Task" [ 1349.388103] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.400665] env[68443]: DEBUG oslo_vmware.api [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]5231a2db-14ac-c24f-cbb3-0186f672c0a7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.462330] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Unregistered the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1349.462504] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Deleting contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1349.462712] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Deleting the datastore file [datastore1] 844f2b9d-ad2a-431a-a587-65ba446d571f {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1349.463011] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-10a08a1b-1e4e-4025-8971-5c393d74fece {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.470127] env[68443]: DEBUG oslo_vmware.api [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Waiting for the task: (returnval){ [ 1349.470127] env[68443]: value = "task-3373996" [ 1349.470127] env[68443]: _type = "Task" [ 1349.470127] env[68443]: } to complete. 
{{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.478279] env[68443]: DEBUG oslo_vmware.api [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Task: {'id': task-3373996, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.905328] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Preparing fetch location {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1349.911895] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Creating directory with path [datastore1] vmware_temp/906b881b-e168-4eaf-a371-a55c6de7b064/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1349.911895] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4261496e-b5c9-427c-9272-4cbbcf8a744a {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.925203] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Created directory with path [datastore1] vmware_temp/906b881b-e168-4eaf-a371-a55c6de7b064/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1349.925613] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Fetch image to [datastore1] vmware_temp/906b881b-e168-4eaf-a371-a55c6de7b064/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1349.925983] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to [datastore1] vmware_temp/906b881b-e168-4eaf-a371-a55c6de7b064/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1349.927035] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb272405-730c-48bb-9690-3f9b10d3467c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.935121] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c5ecebe-f7fc-4dc3-9781-f32429c3d492 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.948997] env[68443]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4658962b-842f-4610-9200-9af6453a5f20 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.988324] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-036d2541-01d5-4e2f-b445-accf7309050b {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.996842] env[68443]: DEBUG oslo_vmware.api [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Task: {'id': task-3373996, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.094913} completed successfully. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1349.998719] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Deleted the datastore file {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1349.999106] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Deleted contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1349.999430] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1349.999901] env[68443]: INFO nova.compute.manager [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Took 0.63 seconds to destroy the instance on the hypervisor. 
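
Note: the entries above show the oslo.vmware task pattern end to end: a vCenter task (CopyVirtualDisk_Task, UnregisterVM, DeleteDatastoreFile_Task) is submitted, the caller logs "Waiting for the task ... to complete", _poll_task reports progress, and a task that finishes in the error state is translated into a fault exception (here VimFaultException with Faults: ['InvalidArgument'] for fileType). The sketch below is a minimal, self-contained analogue of that poll-and-translate loop under assumed data shapes; it is not the oslo.vmware implementation, and `fetch_task_info` and `TaskFaultError` are hypothetical stand-ins.

```python
# Illustrative sketch (not oslo.vmware source): poll a vCenter-style task until
# it succeeds or errors, translating an error result into an exception, in the
# spirit of the wait_for_task/_poll_task lines in the log above.
import time


class TaskFaultError(Exception):
    """Raised when the polled task finishes in the 'error' state."""


def wait_for_task(fetch_task_info, task_id, poll_interval=0.5, timeout=300):
    """Poll task_id via fetch_task_info(task_id) -> dict until it completes.

    Assumed dict keys for this sketch: 'state' in {'queued', 'running',
    'success', 'error'}, 'progress' (percent), 'result', and 'error'
    (message when state == 'error').
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = fetch_task_info(task_id)
        if info['state'] == 'success':
            return info.get('result')
        if info['state'] == 'error':
            # Mirror the log's behaviour: the server-side fault message is
            # wrapped in an exception and raised to the caller.
            raise TaskFaultError(f"Task {task_id} failed: {info.get('error')}")
        # Still queued/running: report progress and wait before polling again.
        print(f"Task {task_id} progress is {info.get('progress', 0)}%.")
        time.sleep(poll_interval)
    raise TimeoutError(f"Task {task_id} did not complete within {timeout}s")
```

In the log, that raised fault is what propagates up from copy_virtual_disk through spawn and surfaces as "Instance failed to spawn", which then triggers the destroy/cleanup path that follows.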
[ 1350.003106] env[68443]: DEBUG nova.compute.claims [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Aborting claim: {{(pid=68443) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1350.003428] env[68443]: DEBUG oslo_concurrency.lockutils [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1350.003789] env[68443]: DEBUG oslo_concurrency.lockutils [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1350.006523] env[68443]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-d67e6a29-483b-4564-954c-0fda1e9d5227 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.031934] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1350.247027] env[68443]: DEBUG oslo_vmware.rw_handles [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/906b881b-e168-4eaf-a371-a55c6de7b064/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68443) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1350.312693] env[68443]: DEBUG oslo_vmware.rw_handles [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Completed reading data from the image iterator. {{(pid=68443) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1350.312881] env[68443]: DEBUG oslo_vmware.rw_handles [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/906b881b-e168-4eaf-a371-a55c6de7b064/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68443) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1350.460995] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e55c476-218c-4795-a6d6-5317f68bd9ee {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.469670] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4226595-313f-40fd-ba77-f4311ce7462a {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.500745] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7511d48b-48bb-4d2f-8463-84f505e1a1e8 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.508338] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b0caed1-6abf-49a6-8242-1fa3e8b32926 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.522069] env[68443]: DEBUG nova.compute.provider_tree [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1350.534974] env[68443]: DEBUG nova.scheduler.client.report [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1350.555014] env[68443]: DEBUG oslo_concurrency.lockutils [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.551s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1350.555579] env[68443]: ERROR nova.compute.manager [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1350.555579] env[68443]: Faults: ['InvalidArgument'] [ 1350.555579] env[68443]: ERROR nova.compute.manager [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Traceback (most recent call last): [ 1350.555579] env[68443]: ERROR nova.compute.manager [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, 
in _build_and_run_instance [ 1350.555579] env[68443]: ERROR nova.compute.manager [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] self.driver.spawn(context, instance, image_meta, [ 1350.555579] env[68443]: ERROR nova.compute.manager [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1350.555579] env[68443]: ERROR nova.compute.manager [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1350.555579] env[68443]: ERROR nova.compute.manager [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1350.555579] env[68443]: ERROR nova.compute.manager [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] self._fetch_image_if_missing(context, vi) [ 1350.555579] env[68443]: ERROR nova.compute.manager [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1350.555579] env[68443]: ERROR nova.compute.manager [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] image_cache(vi, tmp_image_ds_loc) [ 1350.555579] env[68443]: ERROR nova.compute.manager [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1350.555976] env[68443]: ERROR nova.compute.manager [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] vm_util.copy_virtual_disk( [ 1350.555976] env[68443]: ERROR nova.compute.manager [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1350.555976] env[68443]: ERROR nova.compute.manager [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] session._wait_for_task(vmdk_copy_task) [ 1350.555976] env[68443]: ERROR nova.compute.manager [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1350.555976] env[68443]: ERROR nova.compute.manager [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] return self.wait_for_task(task_ref) [ 1350.555976] env[68443]: ERROR nova.compute.manager [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1350.555976] env[68443]: ERROR nova.compute.manager [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] return evt.wait() [ 1350.555976] env[68443]: ERROR nova.compute.manager [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1350.555976] env[68443]: ERROR nova.compute.manager [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] result = hub.switch() [ 1350.555976] env[68443]: ERROR nova.compute.manager [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1350.555976] env[68443]: ERROR nova.compute.manager [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] return self.greenlet.switch() [ 1350.555976] env[68443]: ERROR nova.compute.manager [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1350.555976] env[68443]: ERROR nova.compute.manager [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] self.f(*self.args, **self.kw) [ 1350.556329] env[68443]: ERROR nova.compute.manager [instance: 
844f2b9d-ad2a-431a-a587-65ba446d571f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1350.556329] env[68443]: ERROR nova.compute.manager [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] raise exceptions.translate_fault(task_info.error) [ 1350.556329] env[68443]: ERROR nova.compute.manager [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1350.556329] env[68443]: ERROR nova.compute.manager [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Faults: ['InvalidArgument'] [ 1350.556329] env[68443]: ERROR nova.compute.manager [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] [ 1350.556464] env[68443]: DEBUG nova.compute.utils [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] VimFaultException {{(pid=68443) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1350.558153] env[68443]: DEBUG nova.compute.manager [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Build of instance 844f2b9d-ad2a-431a-a587-65ba446d571f was re-scheduled: A specified parameter was not correct: fileType [ 1350.558153] env[68443]: Faults: ['InvalidArgument'] {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1350.558535] env[68443]: DEBUG nova.compute.manager [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Unplugging VIFs for instance {{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1350.558711] env[68443]: DEBUG nova.compute.manager [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1350.558918] env[68443]: DEBUG nova.compute.manager [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1350.559120] env[68443]: DEBUG nova.network.neutron [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1351.007964] env[68443]: DEBUG nova.network.neutron [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1351.018982] env[68443]: INFO nova.compute.manager [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Took 0.46 seconds to deallocate network for instance. [ 1351.130406] env[68443]: INFO nova.scheduler.client.report [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Deleted allocations for instance 844f2b9d-ad2a-431a-a587-65ba446d571f [ 1351.160529] env[68443]: DEBUG oslo_concurrency.lockutils [None req-73dc6877-6404-44e0-8504-0dd68c894aae tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Lock "844f2b9d-ad2a-431a-a587-65ba446d571f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 609.245s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1351.162658] env[68443]: DEBUG oslo_concurrency.lockutils [None req-f746f47f-753f-4998-977c-250495548b46 tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Lock "844f2b9d-ad2a-431a-a587-65ba446d571f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 411.598s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1351.162658] env[68443]: DEBUG oslo_concurrency.lockutils [None req-f746f47f-753f-4998-977c-250495548b46 tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Acquiring lock "844f2b9d-ad2a-431a-a587-65ba446d571f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1351.162658] env[68443]: DEBUG oslo_concurrency.lockutils [None req-f746f47f-753f-4998-977c-250495548b46 tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Lock 
"844f2b9d-ad2a-431a-a587-65ba446d571f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1351.162879] env[68443]: DEBUG oslo_concurrency.lockutils [None req-f746f47f-753f-4998-977c-250495548b46 tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Lock "844f2b9d-ad2a-431a-a587-65ba446d571f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1351.164935] env[68443]: INFO nova.compute.manager [None req-f746f47f-753f-4998-977c-250495548b46 tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Terminating instance [ 1351.166671] env[68443]: DEBUG nova.compute.manager [None req-f746f47f-753f-4998-977c-250495548b46 tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1351.166892] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-f746f47f-753f-4998-977c-250495548b46 tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1351.167698] env[68443]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1bfebd68-427d-4a44-b348-0f6fe1d8f8d0 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.176494] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9634dfbd-36d0-498f-8d3c-d116503cc937 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.188671] env[68443]: DEBUG nova.compute.manager [None req-b187f8f5-a692-4f62-a0a2-c99437b9ac54 tempest-ListServerFiltersTestJSON-999443258 tempest-ListServerFiltersTestJSON-999443258-project-member] [instance: 1473cac8-4f3b-4c4a-ae12-a7e63e37233d] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1351.209404] env[68443]: WARNING nova.virt.vmwareapi.vmops [None req-f746f47f-753f-4998-977c-250495548b46 tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 844f2b9d-ad2a-431a-a587-65ba446d571f could not be found. 
[ 1351.209619] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-f746f47f-753f-4998-977c-250495548b46 tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1351.209815] env[68443]: INFO nova.compute.manager [None req-f746f47f-753f-4998-977c-250495548b46 tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1351.210106] env[68443]: DEBUG oslo.service.loopingcall [None req-f746f47f-753f-4998-977c-250495548b46 tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1351.210393] env[68443]: DEBUG nova.compute.manager [-] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1351.210494] env[68443]: DEBUG nova.network.neutron [-] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1351.220838] env[68443]: DEBUG nova.compute.manager [None req-b187f8f5-a692-4f62-a0a2-c99437b9ac54 tempest-ListServerFiltersTestJSON-999443258 tempest-ListServerFiltersTestJSON-999443258-project-member] [instance: 1473cac8-4f3b-4c4a-ae12-a7e63e37233d] Instance disappeared before build. {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1351.235644] env[68443]: DEBUG nova.network.neutron [-] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1351.243790] env[68443]: INFO nova.compute.manager [-] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] Took 0.03 seconds to deallocate network for instance. [ 1351.255221] env[68443]: DEBUG oslo_concurrency.lockutils [None req-b187f8f5-a692-4f62-a0a2-c99437b9ac54 tempest-ListServerFiltersTestJSON-999443258 tempest-ListServerFiltersTestJSON-999443258-project-member] Lock "1473cac8-4f3b-4c4a-ae12-a7e63e37233d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 218.587s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1351.264604] env[68443]: DEBUG nova.compute.manager [None req-a66cb5e3-236f-4bc5-b934-f8b827c980d1 tempest-ListServerFiltersTestJSON-999443258 tempest-ListServerFiltersTestJSON-999443258-project-member] [instance: 44678947-527b-40ea-9919-b1491b6f1be3] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1351.308732] env[68443]: DEBUG nova.compute.manager [None req-a66cb5e3-236f-4bc5-b934-f8b827c980d1 tempest-ListServerFiltersTestJSON-999443258 tempest-ListServerFiltersTestJSON-999443258-project-member] [instance: 44678947-527b-40ea-9919-b1491b6f1be3] Instance disappeared before build. 
{{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1351.332134] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a66cb5e3-236f-4bc5-b934-f8b827c980d1 tempest-ListServerFiltersTestJSON-999443258 tempest-ListServerFiltersTestJSON-999443258-project-member] Lock "44678947-527b-40ea-9919-b1491b6f1be3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 217.669s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1351.344603] env[68443]: DEBUG nova.compute.manager [None req-159d19b8-0058-4483-be08-dc639f349fae tempest-ListServerFiltersTestJSON-999443258 tempest-ListServerFiltersTestJSON-999443258-project-member] [instance: 578460b8-6965-4169-ba8c-a04a189af5be] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1351.366914] env[68443]: DEBUG oslo_concurrency.lockutils [None req-f746f47f-753f-4998-977c-250495548b46 tempest-FloatingIPsAssociationTestJSON-104972580 tempest-FloatingIPsAssociationTestJSON-104972580-project-member] Lock "844f2b9d-ad2a-431a-a587-65ba446d571f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.205s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1351.368253] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "844f2b9d-ad2a-431a-a587-65ba446d571f" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 75.276s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1351.368475] env[68443]: INFO nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 844f2b9d-ad2a-431a-a587-65ba446d571f] During sync_power_state the instance has a pending task (deleting). Skip. [ 1351.368946] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "844f2b9d-ad2a-431a-a587-65ba446d571f" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1351.371948] env[68443]: DEBUG nova.compute.manager [None req-159d19b8-0058-4483-be08-dc639f349fae tempest-ListServerFiltersTestJSON-999443258 tempest-ListServerFiltersTestJSON-999443258-project-member] [instance: 578460b8-6965-4169-ba8c-a04a189af5be] Instance disappeared before build. 
{{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1351.394598] env[68443]: DEBUG oslo_concurrency.lockutils [None req-159d19b8-0058-4483-be08-dc639f349fae tempest-ListServerFiltersTestJSON-999443258 tempest-ListServerFiltersTestJSON-999443258-project-member] Lock "578460b8-6965-4169-ba8c-a04a189af5be" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 216.625s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1351.405987] env[68443]: DEBUG nova.compute.manager [None req-02d98ff1-2944-41d3-b673-a6dcc1a2e920 tempest-TenantUsagesTestJSON-810919708 tempest-TenantUsagesTestJSON-810919708-project-member] [instance: 17f0507a-8889-46e1-bce3-d2d423dc9a7f] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1351.440112] env[68443]: DEBUG nova.compute.manager [None req-02d98ff1-2944-41d3-b673-a6dcc1a2e920 tempest-TenantUsagesTestJSON-810919708 tempest-TenantUsagesTestJSON-810919708-project-member] [instance: 17f0507a-8889-46e1-bce3-d2d423dc9a7f] Instance disappeared before build. {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1351.473127] env[68443]: DEBUG oslo_concurrency.lockutils [None req-02d98ff1-2944-41d3-b673-a6dcc1a2e920 tempest-TenantUsagesTestJSON-810919708 tempest-TenantUsagesTestJSON-810919708-project-member] Lock "17f0507a-8889-46e1-bce3-d2d423dc9a7f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 211.078s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1351.485068] env[68443]: DEBUG nova.compute.manager [None req-df12c16d-7129-4220-8f41-156e62ce0f8e tempest-ServerRescueNegativeTestJSON-1396630101 tempest-ServerRescueNegativeTestJSON-1396630101-project-member] [instance: 63e4023e-8a82-4179-8b7f-53801f9bb744] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1351.517655] env[68443]: DEBUG nova.compute.manager [None req-df12c16d-7129-4220-8f41-156e62ce0f8e tempest-ServerRescueNegativeTestJSON-1396630101 tempest-ServerRescueNegativeTestJSON-1396630101-project-member] [instance: 63e4023e-8a82-4179-8b7f-53801f9bb744] Instance disappeared before build. {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1351.552596] env[68443]: DEBUG oslo_concurrency.lockutils [None req-df12c16d-7129-4220-8f41-156e62ce0f8e tempest-ServerRescueNegativeTestJSON-1396630101 tempest-ServerRescueNegativeTestJSON-1396630101-project-member] Lock "63e4023e-8a82-4179-8b7f-53801f9bb744" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 208.391s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1351.564800] env[68443]: DEBUG nova.compute.manager [None req-06e33e97-4f25-4fc8-823f-045355eec793 tempest-ServerRescueNegativeTestJSON-1396630101 tempest-ServerRescueNegativeTestJSON-1396630101-project-member] [instance: 89765b72-d495-4a2a-9b97-e8d7d1d80f49] Starting instance... 
{{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1351.589989] env[68443]: DEBUG nova.compute.manager [None req-06e33e97-4f25-4fc8-823f-045355eec793 tempest-ServerRescueNegativeTestJSON-1396630101 tempest-ServerRescueNegativeTestJSON-1396630101-project-member] [instance: 89765b72-d495-4a2a-9b97-e8d7d1d80f49] Instance disappeared before build. {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1351.613081] env[68443]: DEBUG oslo_concurrency.lockutils [None req-06e33e97-4f25-4fc8-823f-045355eec793 tempest-ServerRescueNegativeTestJSON-1396630101 tempest-ServerRescueNegativeTestJSON-1396630101-project-member] Lock "89765b72-d495-4a2a-9b97-e8d7d1d80f49" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 206.958s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1351.623067] env[68443]: DEBUG nova.compute.manager [None req-94e2fd78-7e45-4e02-8f38-a04ee4b283cc tempest-ImagesNegativeTestJSON-1935587335 tempest-ImagesNegativeTestJSON-1935587335-project-member] [instance: e232a1e8-c431-4b33-aa45-9de3a337f632] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1351.648455] env[68443]: DEBUG nova.compute.manager [None req-94e2fd78-7e45-4e02-8f38-a04ee4b283cc tempest-ImagesNegativeTestJSON-1935587335 tempest-ImagesNegativeTestJSON-1935587335-project-member] [instance: e232a1e8-c431-4b33-aa45-9de3a337f632] Instance disappeared before build. {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1351.671203] env[68443]: DEBUG oslo_concurrency.lockutils [None req-94e2fd78-7e45-4e02-8f38-a04ee4b283cc tempest-ImagesNegativeTestJSON-1935587335 tempest-ImagesNegativeTestJSON-1935587335-project-member] Lock "e232a1e8-c431-4b33-aa45-9de3a337f632" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 198.838s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1351.680447] env[68443]: DEBUG nova.compute.manager [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Starting instance... 
{{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1351.732858] env[68443]: DEBUG oslo_concurrency.lockutils [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1351.733124] env[68443]: DEBUG oslo_concurrency.lockutils [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1351.734510] env[68443]: INFO nova.compute.claims [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1352.019570] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd17e805-eb81-418a-8da3-5a49d493c78d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.028039] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e5247e3-e570-4a82-9f50-56b53a71961d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.057251] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fec9062-7ad1-415d-82bf-0d4de52e6f0c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.064430] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50ba409a-182b-42b6-b95e-79ba7eaf2235 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.077407] env[68443]: DEBUG nova.compute.provider_tree [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1352.086097] env[68443]: DEBUG nova.scheduler.client.report [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1352.100452] env[68443]: DEBUG oslo_concurrency.lockutils [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.367s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1352.100967] env[68443]: DEBUG nova.compute.manager [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Start building networks asynchronously for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1352.131474] env[68443]: DEBUG nova.compute.utils [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Using /dev/sd instead of None {{(pid=68443) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1352.132925] env[68443]: DEBUG nova.compute.manager [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Allocating IP information in the background. {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1352.133117] env[68443]: DEBUG nova.network.neutron [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] allocate_for_instance() {{(pid=68443) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1352.141920] env[68443]: DEBUG nova.compute.manager [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Start building block device mappings for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1352.198578] env[68443]: DEBUG nova.policy [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2d8e52ac3e9c4bb986cad851926676f5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c01fff1d9e874748933155b23733c236', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68443) authorize /opt/stack/nova/nova/policy.py:203}} [ 1352.209898] env[68443]: DEBUG nova.compute.manager [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Start spawning the instance on the hypervisor. 
{{(pid=68443) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1352.237965] env[68443]: DEBUG nova.virt.hardware [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-08T18:48:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-08T18:48:12Z,direct_url=,disk_format='vmdk',id=a80a8b97-4d56-4702-9b02-9d115fcd6710,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f4f2a1e220914ec3b281775c224df247',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-08T18:48:13Z,virtual_size=,visibility=), allow threads: False {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1352.238281] env[68443]: DEBUG nova.virt.hardware [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Flavor limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1352.238458] env[68443]: DEBUG nova.virt.hardware [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Image limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1352.238688] env[68443]: DEBUG nova.virt.hardware [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Flavor pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1352.238886] env[68443]: DEBUG nova.virt.hardware [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Image pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1352.239065] env[68443]: DEBUG nova.virt.hardware [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1352.239309] env[68443]: DEBUG nova.virt.hardware [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1352.239498] env[68443]: DEBUG nova.virt.hardware [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Build topologies for 1 
vcpu(s) 1:1:1 {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1352.239697] env[68443]: DEBUG nova.virt.hardware [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Got 1 possible topologies {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1352.239892] env[68443]: DEBUG nova.virt.hardware [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1352.240104] env[68443]: DEBUG nova.virt.hardware [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1352.241029] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98a0c1d2-d301-45a9-b3a4-1f3fcf505e29 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.249207] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6767f97-69f5-46a7-a120-b55fcdc6b43a {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.508967] env[68443]: DEBUG nova.network.neutron [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Successfully created port: a2efc7ea-4188-47fe-ad83-7b439d785b71 {{(pid=68443) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1353.189654] env[68443]: DEBUG nova.network.neutron [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Successfully updated port: a2efc7ea-4188-47fe-ad83-7b439d785b71 {{(pid=68443) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1353.206906] env[68443]: DEBUG oslo_concurrency.lockutils [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Acquiring lock "refresh_cache-76462ea2-393b-4b3a-baf5-e6f3cffa2e5e" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1353.206906] env[68443]: DEBUG oslo_concurrency.lockutils [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Acquired lock "refresh_cache-76462ea2-393b-4b3a-baf5-e6f3cffa2e5e" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1353.206906] env[68443]: DEBUG nova.network.neutron [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 
tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Building network info cache for instance {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1353.227277] env[68443]: DEBUG nova.compute.manager [req-0fbe6d13-2d40-489e-80f8-7b53dccc9252 req-91bafd64-63c7-44ec-bf18-af529a19c032 service nova] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Received event network-vif-plugged-a2efc7ea-4188-47fe-ad83-7b439d785b71 {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1353.227470] env[68443]: DEBUG oslo_concurrency.lockutils [req-0fbe6d13-2d40-489e-80f8-7b53dccc9252 req-91bafd64-63c7-44ec-bf18-af529a19c032 service nova] Acquiring lock "76462ea2-393b-4b3a-baf5-e6f3cffa2e5e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1353.227678] env[68443]: DEBUG oslo_concurrency.lockutils [req-0fbe6d13-2d40-489e-80f8-7b53dccc9252 req-91bafd64-63c7-44ec-bf18-af529a19c032 service nova] Lock "76462ea2-393b-4b3a-baf5-e6f3cffa2e5e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1353.227826] env[68443]: DEBUG oslo_concurrency.lockutils [req-0fbe6d13-2d40-489e-80f8-7b53dccc9252 req-91bafd64-63c7-44ec-bf18-af529a19c032 service nova] Lock "76462ea2-393b-4b3a-baf5-e6f3cffa2e5e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1353.227998] env[68443]: DEBUG nova.compute.manager [req-0fbe6d13-2d40-489e-80f8-7b53dccc9252 req-91bafd64-63c7-44ec-bf18-af529a19c032 service nova] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] No waiting events found dispatching network-vif-plugged-a2efc7ea-4188-47fe-ad83-7b439d785b71 {{(pid=68443) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1353.228173] env[68443]: WARNING nova.compute.manager [req-0fbe6d13-2d40-489e-80f8-7b53dccc9252 req-91bafd64-63c7-44ec-bf18-af529a19c032 service nova] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Received unexpected event network-vif-plugged-a2efc7ea-4188-47fe-ad83-7b439d785b71 for instance with vm_state building and task_state spawning. [ 1353.250068] env[68443]: DEBUG nova.network.neutron [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Instance cache missing network info. 
{{(pid=68443) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1353.429667] env[68443]: DEBUG nova.network.neutron [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Updating instance_info_cache with network_info: [{"id": "a2efc7ea-4188-47fe-ad83-7b439d785b71", "address": "fa:16:3e:cd:31:c1", "network": {"id": "adc0518d-2eac-471b-9de6-410ce0f88240", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1726004855-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c01fff1d9e874748933155b23733c236", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73915082-a1b0-460b-b24d-97588fc9cb29", "external-id": "nsx-vlan-transportzone-744", "segmentation_id": 744, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2efc7ea-41", "ovs_interfaceid": "a2efc7ea-4188-47fe-ad83-7b439d785b71", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1353.440521] env[68443]: DEBUG oslo_concurrency.lockutils [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Releasing lock "refresh_cache-76462ea2-393b-4b3a-baf5-e6f3cffa2e5e" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1353.440754] env[68443]: DEBUG nova.compute.manager [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Instance network_info: |[{"id": "a2efc7ea-4188-47fe-ad83-7b439d785b71", "address": "fa:16:3e:cd:31:c1", "network": {"id": "adc0518d-2eac-471b-9de6-410ce0f88240", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1726004855-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c01fff1d9e874748933155b23733c236", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73915082-a1b0-460b-b24d-97588fc9cb29", "external-id": "nsx-vlan-transportzone-744", "segmentation_id": 744, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2efc7ea-41", "ovs_interfaceid": "a2efc7ea-4188-47fe-ad83-7b439d785b71", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": 
true, "meta": {}}]| {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1353.441178] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cd:31:c1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '73915082-a1b0-460b-b24d-97588fc9cb29', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a2efc7ea-4188-47fe-ad83-7b439d785b71', 'vif_model': 'vmxnet3'}] {{(pid=68443) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1353.448617] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Creating folder: Project (c01fff1d9e874748933155b23733c236). Parent ref: group-v673136. {{(pid=68443) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1353.449163] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-26cd0206-da15-4b96-a087-947ab2322268 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.459424] env[68443]: INFO nova.virt.vmwareapi.vm_util [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Created folder: Project (c01fff1d9e874748933155b23733c236) in parent group-v673136. [ 1353.459605] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Creating folder: Instances. Parent ref: group-v673207. {{(pid=68443) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1353.459874] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-79096e2a-986f-4e02-8cab-8426af3c972e {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.468807] env[68443]: INFO nova.virt.vmwareapi.vm_util [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Created folder: Instances in parent group-v673207. [ 1353.469065] env[68443]: DEBUG oslo.service.loopingcall [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1353.469252] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Creating VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1353.469440] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b0b33554-c641-4547-9a2d-0d0faaddec06 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.488530] env[68443]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1353.488530] env[68443]: value = "task-3373999" [ 1353.488530] env[68443]: _type = "Task" [ 1353.488530] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1353.495661] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3373999, 'name': CreateVM_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.963904] env[68443]: DEBUG oslo_concurrency.lockutils [None req-b80fb5e7-d182-492a-89d1-4f8145f154a0 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Acquiring lock "76462ea2-393b-4b3a-baf5-e6f3cffa2e5e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1353.998574] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3373999, 'name': CreateVM_Task, 'duration_secs': 0.283853} completed successfully. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1353.998777] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Created VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1354.005703] env[68443]: DEBUG oslo_concurrency.lockutils [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1354.005880] env[68443]: DEBUG oslo_concurrency.lockutils [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1354.006228] env[68443]: DEBUG oslo_concurrency.lockutils [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1354.006472] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6610f25a-bf35-43c3-ac95-7657674d63f8 {{(pid=68443) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.011022] env[68443]: DEBUG oslo_vmware.api [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Waiting for the task: (returnval){ [ 1354.011022] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]5261d1c6-0800-531a-92f8-b476f23bb0e3" [ 1354.011022] env[68443]: _type = "Task" [ 1354.011022] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.018324] env[68443]: DEBUG oslo_vmware.api [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]5261d1c6-0800-531a-92f8-b476f23bb0e3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.521684] env[68443]: DEBUG oslo_concurrency.lockutils [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1354.521684] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Processing image a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1354.521684] env[68443]: DEBUG oslo_concurrency.lockutils [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1355.376282] env[68443]: DEBUG nova.compute.manager [req-884d139e-37ac-4e43-9130-2770f7988090 req-005cf224-e564-454d-bfd7-d460fbd2b3fc service nova] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Received event network-changed-a2efc7ea-4188-47fe-ad83-7b439d785b71 {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1355.376282] env[68443]: DEBUG nova.compute.manager [req-884d139e-37ac-4e43-9130-2770f7988090 req-005cf224-e564-454d-bfd7-d460fbd2b3fc service nova] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Refreshing instance network info cache due to event network-changed-a2efc7ea-4188-47fe-ad83-7b439d785b71. 
{{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1355.376282] env[68443]: DEBUG oslo_concurrency.lockutils [req-884d139e-37ac-4e43-9130-2770f7988090 req-005cf224-e564-454d-bfd7-d460fbd2b3fc service nova] Acquiring lock "refresh_cache-76462ea2-393b-4b3a-baf5-e6f3cffa2e5e" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1355.376282] env[68443]: DEBUG oslo_concurrency.lockutils [req-884d139e-37ac-4e43-9130-2770f7988090 req-005cf224-e564-454d-bfd7-d460fbd2b3fc service nova] Acquired lock "refresh_cache-76462ea2-393b-4b3a-baf5-e6f3cffa2e5e" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1355.376439] env[68443]: DEBUG nova.network.neutron [req-884d139e-37ac-4e43-9130-2770f7988090 req-005cf224-e564-454d-bfd7-d460fbd2b3fc service nova] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Refreshing network info cache for port a2efc7ea-4188-47fe-ad83-7b439d785b71 {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1355.634546] env[68443]: DEBUG nova.network.neutron [req-884d139e-37ac-4e43-9130-2770f7988090 req-005cf224-e564-454d-bfd7-d460fbd2b3fc service nova] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Updated VIF entry in instance network info cache for port a2efc7ea-4188-47fe-ad83-7b439d785b71. {{(pid=68443) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1355.634906] env[68443]: DEBUG nova.network.neutron [req-884d139e-37ac-4e43-9130-2770f7988090 req-005cf224-e564-454d-bfd7-d460fbd2b3fc service nova] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Updating instance_info_cache with network_info: [{"id": "a2efc7ea-4188-47fe-ad83-7b439d785b71", "address": "fa:16:3e:cd:31:c1", "network": {"id": "adc0518d-2eac-471b-9de6-410ce0f88240", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1726004855-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c01fff1d9e874748933155b23733c236", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73915082-a1b0-460b-b24d-97588fc9cb29", "external-id": "nsx-vlan-transportzone-744", "segmentation_id": 744, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2efc7ea-41", "ovs_interfaceid": "a2efc7ea-4188-47fe-ad83-7b439d785b71", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1355.644250] env[68443]: DEBUG oslo_concurrency.lockutils [req-884d139e-37ac-4e43-9130-2770f7988090 req-005cf224-e564-454d-bfd7-d460fbd2b3fc service nova] Releasing lock "refresh_cache-76462ea2-393b-4b3a-baf5-e6f3cffa2e5e" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1358.780720] env[68443]: DEBUG oslo_concurrency.lockutils [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 
tempest-AttachVolumeShelveTestJSON-7531367-project-member] Acquiring lock "7366efe5-c640-4689-97a1-fba0ac431b12" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1358.780720] env[68443]: DEBUG oslo_concurrency.lockutils [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Lock "7366efe5-c640-4689-97a1-fba0ac431b12" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1361.170677] env[68443]: DEBUG oslo_concurrency.lockutils [None req-d6998428-1eaa-40e5-8ef3-f54e2672bcb6 tempest-ServerShowV257Test-1338230860 tempest-ServerShowV257Test-1338230860-project-member] Acquiring lock "0dd99061-79ed-4348-9a31-7980d6ea5db6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1361.170965] env[68443]: DEBUG oslo_concurrency.lockutils [None req-d6998428-1eaa-40e5-8ef3-f54e2672bcb6 tempest-ServerShowV257Test-1338230860 tempest-ServerShowV257Test-1338230860-project-member] Lock "0dd99061-79ed-4348-9a31-7980d6ea5db6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1371.200734] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4d0a7f73-0242-487f-8f9e-a02ac9398c39 tempest-AttachVolumeNegativeTest-1393756799 tempest-AttachVolumeNegativeTest-1393756799-project-member] Acquiring lock "5943fe10-a829-4142-a4ae-c6035fe5f4e8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1371.201039] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4d0a7f73-0242-487f-8f9e-a02ac9398c39 tempest-AttachVolumeNegativeTest-1393756799 tempest-AttachVolumeNegativeTest-1393756799-project-member] Lock "5943fe10-a829-4142-a4ae-c6035fe5f4e8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1379.275654] env[68443]: DEBUG oslo_concurrency.lockutils [None req-f8f671bb-d719-4c2c-9d33-da95afa70cda tempest-ServerRescueTestJSON-902940739 tempest-ServerRescueTestJSON-902940739-project-member] Acquiring lock "db9a6b36-6c53-4769-b93e-3c38b95533d6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1379.276100] env[68443]: DEBUG oslo_concurrency.lockutils [None req-f8f671bb-d719-4c2c-9d33-da95afa70cda tempest-ServerRescueTestJSON-902940739 tempest-ServerRescueTestJSON-902940739-project-member] Lock "db9a6b36-6c53-4769-b93e-3c38b95533d6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1383.825930] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1384.826138] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1385.826299] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1386.825566] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1386.825753] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Starting heal instance info cache {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1386.826063] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Rebuilding the list of instances to heal {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1386.876208] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1386.876504] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1386.876504] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1386.876624] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 3842d98e-d971-456c-b287-53c513285acf] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1386.876750] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Skipping network cache update for instance because it is Building. 
{{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1386.876872] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1386.876990] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1386.877120] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1386.877236] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1386.877351] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1386.877469] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Didn't find any instances for network info cache update. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1388.825862] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1388.826198] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1388.826356] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68443) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1390.822546] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1390.844584] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1390.858767] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1390.859067] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1390.859285] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1390.859477] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68443) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1390.860732] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59fdf74e-dda4-4d4c-b447-0d20d8983db6 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.870373] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-739d79d5-4ad0-44a0-92c0-9cc87d104a18 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.892036] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac11c26a-e41b-42d0-840f-749235faa57c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.902482] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7774f8ff-a2b6-493e-8a95-9918aba6cc65 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.937609] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181002MB free_disk=105GB free_vcpus=48 pci_devices=None {{(pid=68443) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1390.937795] env[68443]: DEBUG 
oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1390.937972] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1391.023933] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance cee290e6-66e3-4d2e-a9bb-f93db33eaaaa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1391.024117] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 1c1acc0d-263d-4687-93ff-291d18a592d8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1391.024253] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1391.024376] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 3842d98e-d971-456c-b287-53c513285acf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1391.024498] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 6e162408-6d3d-42e0-8992-f5843e9e7855 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1391.024618] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance fd0de9a2-7a54-46be-8b6a-3415366e110c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1391.024738] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance bcdc4f46-810d-4ed7-84f1-2db2c318f920 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1391.024855] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 91fd9c10-db96-4366-9548-13b36f94db6b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1391.024974] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 75e3d9b0-4317-4e6e-9f2b-d32134f7223f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1391.025102] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1391.038068] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 2a6b143c-5702-4ca4-81c7-8114ecfb441d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1391.051474] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 196d4d2c-ca4a-47ac-a448-f1caa0fe0854 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1391.065567] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance b0d16bba-c6f9-4f8e-8fe0-bd3d42ad154c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1391.078843] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance a4708485-db53-416e-94be-f9a017eb28c4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1391.091074] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 6817aec7-2b56-4a82-ad46-e1957588a8a4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1391.104810] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 18bae6a1-3bd0-4749-8795-5b8ccd18193f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1391.119755] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 99b16cd5-beb0-4f71-8011-411b84ddf497 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1391.133334] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1391.145204] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 04efeff0-d708-4ab6-bd7a-b438bf28c1d7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1391.157721] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 7366efe5-c640-4689-97a1-fba0ac431b12 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1391.170325] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 0dd99061-79ed-4348-9a31-7980d6ea5db6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1391.181533] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 5943fe10-a829-4142-a4ae-c6035fe5f4e8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1391.192901] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance db9a6b36-6c53-4769-b93e-3c38b95533d6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1391.193154] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1391.193307] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1391.521181] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40b05b81-64ba-4854-82d4-de439519d43a {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.529037] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ae3a02d-4f22-4bb0-abf6-f0ed2127052c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.560885] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce27e720-1144-4eac-9b91-d3094e17d2d1 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.568905] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89fa74a4-f9ee-489b-a58d-ba443847d5e0 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.582727] env[68443]: DEBUG nova.compute.provider_tree [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1391.593471] env[68443]: DEBUG nova.scheduler.client.report [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1391.609404] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68443) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1391.609655] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.672s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1392.591234] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1392.591234] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1393.526614] env[68443]: DEBUG oslo_concurrency.lockutils [None req-bfcc8c0d-38f3-4400-9622-5338e770e1eb tempest-AttachVolumeTestJSON-1284107966 tempest-AttachVolumeTestJSON-1284107966-project-member] Acquiring lock "91e6d317-9322-4938-a1da-f88d36499c7b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1393.526842] env[68443]: DEBUG oslo_concurrency.lockutils [None req-bfcc8c0d-38f3-4400-9622-5338e770e1eb tempest-AttachVolumeTestJSON-1284107966 tempest-AttachVolumeTestJSON-1284107966-project-member] Lock "91e6d317-9322-4938-a1da-f88d36499c7b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1398.858030] env[68443]: WARNING oslo_vmware.rw_handles [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1398.858030] env[68443]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1398.858030] env[68443]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1398.858030] env[68443]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1398.858030] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1398.858030] env[68443]: ERROR oslo_vmware.rw_handles response.begin() [ 1398.858030] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1398.858030] env[68443]: ERROR oslo_vmware.rw_handles version, status, reason = 
self._read_status() [ 1398.858030] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1398.858030] env[68443]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1398.858030] env[68443]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1398.858030] env[68443]: ERROR oslo_vmware.rw_handles [ 1398.858714] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Downloaded image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to vmware_temp/906b881b-e168-4eaf-a371-a55c6de7b064/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1398.860625] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Caching image {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1398.860974] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Copying Virtual Disk [datastore1] vmware_temp/906b881b-e168-4eaf-a371-a55c6de7b064/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk to [datastore1] vmware_temp/906b881b-e168-4eaf-a371-a55c6de7b064/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk {{(pid=68443) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1398.861547] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b2746529-5ef0-4a25-86f1-c5f17161859d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.873021] env[68443]: DEBUG oslo_vmware.api [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Waiting for the task: (returnval){ [ 1398.873021] env[68443]: value = "task-3374000" [ 1398.873021] env[68443]: _type = "Task" [ 1398.873021] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1398.880256] env[68443]: DEBUG oslo_vmware.api [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Task: {'id': task-3374000, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.381654] env[68443]: DEBUG oslo_vmware.exceptions [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Fault InvalidArgument not matched. 
{{(pid=68443) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1399.381941] env[68443]: DEBUG oslo_concurrency.lockutils [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1399.382524] env[68443]: ERROR nova.compute.manager [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1399.382524] env[68443]: Faults: ['InvalidArgument'] [ 1399.382524] env[68443]: ERROR nova.compute.manager [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Traceback (most recent call last): [ 1399.382524] env[68443]: ERROR nova.compute.manager [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1399.382524] env[68443]: ERROR nova.compute.manager [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] yield resources [ 1399.382524] env[68443]: ERROR nova.compute.manager [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1399.382524] env[68443]: ERROR nova.compute.manager [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] self.driver.spawn(context, instance, image_meta, [ 1399.382524] env[68443]: ERROR nova.compute.manager [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1399.382524] env[68443]: ERROR nova.compute.manager [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1399.382524] env[68443]: ERROR nova.compute.manager [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1399.382524] env[68443]: ERROR nova.compute.manager [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] self._fetch_image_if_missing(context, vi) [ 1399.382524] env[68443]: ERROR nova.compute.manager [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1399.382954] env[68443]: ERROR nova.compute.manager [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] image_cache(vi, tmp_image_ds_loc) [ 1399.382954] env[68443]: ERROR nova.compute.manager [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1399.382954] env[68443]: ERROR nova.compute.manager [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] vm_util.copy_virtual_disk( [ 1399.382954] env[68443]: ERROR nova.compute.manager [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1399.382954] env[68443]: ERROR nova.compute.manager [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] session._wait_for_task(vmdk_copy_task) [ 1399.382954] env[68443]: ERROR nova.compute.manager [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 1399.382954] env[68443]: ERROR nova.compute.manager [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] return self.wait_for_task(task_ref) [ 1399.382954] env[68443]: ERROR nova.compute.manager [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1399.382954] env[68443]: ERROR nova.compute.manager [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] return evt.wait() [ 1399.382954] env[68443]: ERROR nova.compute.manager [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1399.382954] env[68443]: ERROR nova.compute.manager [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] result = hub.switch() [ 1399.382954] env[68443]: ERROR nova.compute.manager [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1399.382954] env[68443]: ERROR nova.compute.manager [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] return self.greenlet.switch() [ 1399.383403] env[68443]: ERROR nova.compute.manager [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1399.383403] env[68443]: ERROR nova.compute.manager [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] self.f(*self.args, **self.kw) [ 1399.383403] env[68443]: ERROR nova.compute.manager [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1399.383403] env[68443]: ERROR nova.compute.manager [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] raise exceptions.translate_fault(task_info.error) [ 1399.383403] env[68443]: ERROR nova.compute.manager [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1399.383403] env[68443]: ERROR nova.compute.manager [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Faults: ['InvalidArgument'] [ 1399.383403] env[68443]: ERROR nova.compute.manager [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] [ 1399.383403] env[68443]: INFO nova.compute.manager [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Terminating instance [ 1399.384456] env[68443]: DEBUG oslo_concurrency.lockutils [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1399.385062] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1399.385062] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c39d5362-81f3-4438-bf94-67665b7084c8 {{(pid=68443) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.387274] env[68443]: DEBUG nova.compute.manager [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1399.387467] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1399.388193] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1876749b-67bc-44a5-8e4d-2045ef916471 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.394777] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Unregistering the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1399.394994] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0f092341-1798-4224-a52e-5b378e433663 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.397185] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1399.397361] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68443) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1399.398297] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5a39e60-703d-4e33-97f9-c28a04505e94 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.403700] env[68443]: DEBUG oslo_vmware.api [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Waiting for the task: (returnval){ [ 1399.403700] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]527d9549-ff70-b753-e6fb-ce3a2fc48ae1" [ 1399.403700] env[68443]: _type = "Task" [ 1399.403700] env[68443]: } to complete. 
{{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.410514] env[68443]: DEBUG oslo_vmware.api [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]527d9549-ff70-b753-e6fb-ce3a2fc48ae1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.765893] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Unregistered the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1399.766095] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Deleting contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1399.766238] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Deleting the datastore file [datastore1] cee290e6-66e3-4d2e-a9bb-f93db33eaaaa {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1399.766504] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-31c7c048-d360-4bda-87af-3f63104c3878 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.773883] env[68443]: DEBUG oslo_vmware.api [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Waiting for the task: (returnval){ [ 1399.773883] env[68443]: value = "task-3374002" [ 1399.773883] env[68443]: _type = "Task" [ 1399.773883] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.781636] env[68443]: DEBUG oslo_vmware.api [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Task: {'id': task-3374002, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.913788] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Preparing fetch location {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1399.914090] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Creating directory with path [datastore1] vmware_temp/44273f26-c683-4ed0-93c1-6ca75c50ef83/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1399.914306] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dd5eb87c-609c-4eb5-8869-05d8ad1efe03 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.927059] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Created directory with path [datastore1] vmware_temp/44273f26-c683-4ed0-93c1-6ca75c50ef83/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1399.927059] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Fetch image to [datastore1] vmware_temp/44273f26-c683-4ed0-93c1-6ca75c50ef83/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1399.927233] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to [datastore1] vmware_temp/44273f26-c683-4ed0-93c1-6ca75c50ef83/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1399.928023] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1233ae8a-f585-4afa-afaf-77231e4ce2b4 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.934717] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-923d40e0-d1ee-4211-a18c-43ceab264207 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.943979] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a389f79b-b31f-448e-9a03-092881e49eca {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.974077] env[68443]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1e0b0d5-363a-4207-948e-7c8d819caffc {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.979968] env[68443]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-f642b641-ebd9-4e17-82e0-eafe000be057 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.001046] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1400.169551] env[68443]: DEBUG oslo_vmware.rw_handles [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/44273f26-c683-4ed0-93c1-6ca75c50ef83/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68443) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1400.233183] env[68443]: DEBUG oslo_vmware.rw_handles [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Completed reading data from the image iterator. {{(pid=68443) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1400.233427] env[68443]: DEBUG oslo_vmware.rw_handles [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/44273f26-c683-4ed0-93c1-6ca75c50ef83/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68443) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1400.285474] env[68443]: DEBUG oslo_vmware.api [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Task: {'id': task-3374002, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.076455} completed successfully. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.285684] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Deleted the datastore file {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1400.285877] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Deleted contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1400.286057] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1400.286230] env[68443]: INFO nova.compute.manager [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Took 0.90 seconds to destroy the instance on the hypervisor. [ 1400.288421] env[68443]: DEBUG nova.compute.claims [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Aborting claim: {{(pid=68443) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1400.288586] env[68443]: DEBUG oslo_concurrency.lockutils [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1400.288820] env[68443]: DEBUG oslo_concurrency.lockutils [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1400.590021] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a7e63fe-aa8e-4e82-b461-a1f59080ab63 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.597817] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ddabc4f-1327-4245-8349-a0b1580aeb1c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.626497] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2590287d-001c-474b-8411-eb4faba4d414 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.633699] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-2a6e4ce9-7f2b-4b32-b899-1d8db262fe7e {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.646211] env[68443]: DEBUG nova.compute.provider_tree [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1400.656011] env[68443]: DEBUG nova.scheduler.client.report [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1400.669606] env[68443]: DEBUG oslo_concurrency.lockutils [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.381s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1400.670161] env[68443]: ERROR nova.compute.manager [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1400.670161] env[68443]: Faults: ['InvalidArgument'] [ 1400.670161] env[68443]: ERROR nova.compute.manager [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Traceback (most recent call last): [ 1400.670161] env[68443]: ERROR nova.compute.manager [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1400.670161] env[68443]: ERROR nova.compute.manager [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] self.driver.spawn(context, instance, image_meta, [ 1400.670161] env[68443]: ERROR nova.compute.manager [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1400.670161] env[68443]: ERROR nova.compute.manager [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1400.670161] env[68443]: ERROR nova.compute.manager [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1400.670161] env[68443]: ERROR nova.compute.manager [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] self._fetch_image_if_missing(context, vi) [ 1400.670161] env[68443]: ERROR nova.compute.manager [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1400.670161] env[68443]: ERROR nova.compute.manager [instance: 
cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] image_cache(vi, tmp_image_ds_loc) [ 1400.670161] env[68443]: ERROR nova.compute.manager [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1400.670585] env[68443]: ERROR nova.compute.manager [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] vm_util.copy_virtual_disk( [ 1400.670585] env[68443]: ERROR nova.compute.manager [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1400.670585] env[68443]: ERROR nova.compute.manager [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] session._wait_for_task(vmdk_copy_task) [ 1400.670585] env[68443]: ERROR nova.compute.manager [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1400.670585] env[68443]: ERROR nova.compute.manager [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] return self.wait_for_task(task_ref) [ 1400.670585] env[68443]: ERROR nova.compute.manager [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1400.670585] env[68443]: ERROR nova.compute.manager [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] return evt.wait() [ 1400.670585] env[68443]: ERROR nova.compute.manager [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1400.670585] env[68443]: ERROR nova.compute.manager [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] result = hub.switch() [ 1400.670585] env[68443]: ERROR nova.compute.manager [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1400.670585] env[68443]: ERROR nova.compute.manager [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] return self.greenlet.switch() [ 1400.670585] env[68443]: ERROR nova.compute.manager [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1400.670585] env[68443]: ERROR nova.compute.manager [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] self.f(*self.args, **self.kw) [ 1400.671013] env[68443]: ERROR nova.compute.manager [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1400.671013] env[68443]: ERROR nova.compute.manager [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] raise exceptions.translate_fault(task_info.error) [ 1400.671013] env[68443]: ERROR nova.compute.manager [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1400.671013] env[68443]: ERROR nova.compute.manager [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Faults: ['InvalidArgument'] [ 1400.671013] env[68443]: ERROR nova.compute.manager [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] [ 1400.671013] env[68443]: DEBUG nova.compute.utils [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] VimFaultException {{(pid=68443) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1400.672234] env[68443]: DEBUG 
nova.compute.manager [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Build of instance cee290e6-66e3-4d2e-a9bb-f93db33eaaaa was re-scheduled: A specified parameter was not correct: fileType [ 1400.672234] env[68443]: Faults: ['InvalidArgument'] {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1400.672589] env[68443]: DEBUG nova.compute.manager [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Unplugging VIFs for instance {{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1400.672759] env[68443]: DEBUG nova.compute.manager [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1400.672930] env[68443]: DEBUG nova.compute.manager [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1400.673107] env[68443]: DEBUG nova.network.neutron [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1400.989170] env[68443]: DEBUG nova.network.neutron [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1401.001108] env[68443]: INFO nova.compute.manager [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Took 0.33 seconds to deallocate network for instance. 
[ 1401.101334] env[68443]: INFO nova.scheduler.client.report [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Deleted allocations for instance cee290e6-66e3-4d2e-a9bb-f93db33eaaaa [ 1401.126593] env[68443]: DEBUG oslo_concurrency.lockutils [None req-b79c2c08-cfa0-42a3-a32a-38508e4c58a3 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Lock "cee290e6-66e3-4d2e-a9bb-f93db33eaaaa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 657.559s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1401.127973] env[68443]: DEBUG oslo_concurrency.lockutils [None req-209e370b-5bab-4e48-8f17-0644f622ce28 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Lock "cee290e6-66e3-4d2e-a9bb-f93db33eaaaa" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 455.839s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1401.128224] env[68443]: DEBUG oslo_concurrency.lockutils [None req-209e370b-5bab-4e48-8f17-0644f622ce28 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Acquiring lock "cee290e6-66e3-4d2e-a9bb-f93db33eaaaa-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1401.128451] env[68443]: DEBUG oslo_concurrency.lockutils [None req-209e370b-5bab-4e48-8f17-0644f622ce28 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Lock "cee290e6-66e3-4d2e-a9bb-f93db33eaaaa-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1401.128622] env[68443]: DEBUG oslo_concurrency.lockutils [None req-209e370b-5bab-4e48-8f17-0644f622ce28 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Lock "cee290e6-66e3-4d2e-a9bb-f93db33eaaaa-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1401.130665] env[68443]: INFO nova.compute.manager [None req-209e370b-5bab-4e48-8f17-0644f622ce28 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Terminating instance [ 1401.132451] env[68443]: DEBUG nova.compute.manager [None req-209e370b-5bab-4e48-8f17-0644f622ce28 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Start destroying the instance on the hypervisor. 
{{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1401.132648] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-209e370b-5bab-4e48-8f17-0644f622ce28 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1401.133145] env[68443]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b1fdad4c-1d52-44cb-b088-ebbb7baad202 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.142654] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43845b9a-10d3-4ced-aa00-57883beb4f8d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.153313] env[68443]: DEBUG nova.compute.manager [None req-682b7980-f0e9-4b5b-81c0-d4aabe4302a2 tempest-ServersTestBootFromVolume-1371864501 tempest-ServersTestBootFromVolume-1371864501-project-member] [instance: bd894d43-4d8f-438b-aea8-29bcb43c77fa] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1401.173190] env[68443]: WARNING nova.virt.vmwareapi.vmops [None req-209e370b-5bab-4e48-8f17-0644f622ce28 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance cee290e6-66e3-4d2e-a9bb-f93db33eaaaa could not be found. [ 1401.173402] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-209e370b-5bab-4e48-8f17-0644f622ce28 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1401.173581] env[68443]: INFO nova.compute.manager [None req-209e370b-5bab-4e48-8f17-0644f622ce28 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1401.173821] env[68443]: DEBUG oslo.service.loopingcall [None req-209e370b-5bab-4e48-8f17-0644f622ce28 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1401.174063] env[68443]: DEBUG nova.compute.manager [-] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1401.174163] env[68443]: DEBUG nova.network.neutron [-] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1401.177089] env[68443]: DEBUG nova.compute.manager [None req-682b7980-f0e9-4b5b-81c0-d4aabe4302a2 tempest-ServersTestBootFromVolume-1371864501 tempest-ServersTestBootFromVolume-1371864501-project-member] [instance: bd894d43-4d8f-438b-aea8-29bcb43c77fa] Instance disappeared before build. 
{{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1401.198420] env[68443]: DEBUG oslo_concurrency.lockutils [None req-682b7980-f0e9-4b5b-81c0-d4aabe4302a2 tempest-ServersTestBootFromVolume-1371864501 tempest-ServersTestBootFromVolume-1371864501-project-member] Lock "bd894d43-4d8f-438b-aea8-29bcb43c77fa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 239.679s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1401.199859] env[68443]: DEBUG nova.network.neutron [-] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1401.210523] env[68443]: INFO nova.compute.manager [-] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] Took 0.04 seconds to deallocate network for instance. [ 1401.210970] env[68443]: DEBUG nova.compute.manager [None req-324090ca-523a-4825-a40f-8cb430d6d77e tempest-AttachVolumeNegativeTest-1393756799 tempest-AttachVolumeNegativeTest-1393756799-project-member] [instance: ccf8bec6-77c7-4208-a808-e0b012c04f98] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1401.236629] env[68443]: DEBUG nova.compute.manager [None req-324090ca-523a-4825-a40f-8cb430d6d77e tempest-AttachVolumeNegativeTest-1393756799 tempest-AttachVolumeNegativeTest-1393756799-project-member] [instance: ccf8bec6-77c7-4208-a808-e0b012c04f98] Instance disappeared before build. {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1401.257233] env[68443]: DEBUG oslo_concurrency.lockutils [None req-324090ca-523a-4825-a40f-8cb430d6d77e tempest-AttachVolumeNegativeTest-1393756799 tempest-AttachVolumeNegativeTest-1393756799-project-member] Lock "ccf8bec6-77c7-4208-a808-e0b012c04f98" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 227.857s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1401.266117] env[68443]: DEBUG nova.compute.manager [None req-c5c745fc-051f-4a02-a514-acef8638f364 tempest-InstanceActionsV221TestJSON-2003231307 tempest-InstanceActionsV221TestJSON-2003231307-project-member] [instance: 7a989ca4-b091-457b-a9ef-57083a8a285e] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1401.289927] env[68443]: DEBUG nova.compute.manager [None req-c5c745fc-051f-4a02-a514-acef8638f364 tempest-InstanceActionsV221TestJSON-2003231307 tempest-InstanceActionsV221TestJSON-2003231307-project-member] [instance: 7a989ca4-b091-457b-a9ef-57083a8a285e] Instance disappeared before build. 
{{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1401.307194] env[68443]: DEBUG oslo_concurrency.lockutils [None req-209e370b-5bab-4e48-8f17-0644f622ce28 tempest-ServersAdminTestJSON-1774531573 tempest-ServersAdminTestJSON-1774531573-project-member] Lock "cee290e6-66e3-4d2e-a9bb-f93db33eaaaa" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.179s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1401.308023] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "cee290e6-66e3-4d2e-a9bb-f93db33eaaaa" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 125.216s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1401.308212] env[68443]: INFO nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: cee290e6-66e3-4d2e-a9bb-f93db33eaaaa] During sync_power_state the instance has a pending task (deleting). Skip. [ 1401.308388] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "cee290e6-66e3-4d2e-a9bb-f93db33eaaaa" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1401.311954] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c5c745fc-051f-4a02-a514-acef8638f364 tempest-InstanceActionsV221TestJSON-2003231307 tempest-InstanceActionsV221TestJSON-2003231307-project-member] Lock "7a989ca4-b091-457b-a9ef-57083a8a285e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 224.706s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1401.320737] env[68443]: DEBUG nova.compute.manager [None req-0bd38fdc-f800-48b5-813f-04689303a057 tempest-ServersAaction247Test-1630594896 tempest-ServersAaction247Test-1630594896-project-member] [instance: 2a6b143c-5702-4ca4-81c7-8114ecfb441d] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1401.344104] env[68443]: DEBUG nova.compute.manager [None req-0bd38fdc-f800-48b5-813f-04689303a057 tempest-ServersAaction247Test-1630594896 tempest-ServersAaction247Test-1630594896-project-member] [instance: 2a6b143c-5702-4ca4-81c7-8114ecfb441d] Instance disappeared before build. {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1401.364674] env[68443]: DEBUG oslo_concurrency.lockutils [None req-0bd38fdc-f800-48b5-813f-04689303a057 tempest-ServersAaction247Test-1630594896 tempest-ServersAaction247Test-1630594896-project-member] Lock "2a6b143c-5702-4ca4-81c7-8114ecfb441d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 207.075s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1401.373328] env[68443]: DEBUG nova.compute.manager [None req-216d17d2-3ee0-45a0-91d7-ae6a8520e9f7 tempest-AttachVolumeTestJSON-1284107966 tempest-AttachVolumeTestJSON-1284107966-project-member] [instance: 196d4d2c-ca4a-47ac-a448-f1caa0fe0854] Starting instance... 
{{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1401.395198] env[68443]: DEBUG nova.compute.manager [None req-216d17d2-3ee0-45a0-91d7-ae6a8520e9f7 tempest-AttachVolumeTestJSON-1284107966 tempest-AttachVolumeTestJSON-1284107966-project-member] [instance: 196d4d2c-ca4a-47ac-a448-f1caa0fe0854] Instance disappeared before build. {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1401.419898] env[68443]: DEBUG oslo_concurrency.lockutils [None req-216d17d2-3ee0-45a0-91d7-ae6a8520e9f7 tempest-AttachVolumeTestJSON-1284107966 tempest-AttachVolumeTestJSON-1284107966-project-member] Lock "196d4d2c-ca4a-47ac-a448-f1caa0fe0854" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 206.688s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1401.429039] env[68443]: DEBUG nova.compute.manager [None req-eb5193c6-d9bd-4e97-9578-b054701fb85e tempest-ServerPasswordTestJSON-1337937232 tempest-ServerPasswordTestJSON-1337937232-project-member] [instance: b0d16bba-c6f9-4f8e-8fe0-bd3d42ad154c] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1401.451608] env[68443]: DEBUG nova.compute.manager [None req-eb5193c6-d9bd-4e97-9578-b054701fb85e tempest-ServerPasswordTestJSON-1337937232 tempest-ServerPasswordTestJSON-1337937232-project-member] [instance: b0d16bba-c6f9-4f8e-8fe0-bd3d42ad154c] Instance disappeared before build. {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1401.477054] env[68443]: DEBUG oslo_concurrency.lockutils [None req-eb5193c6-d9bd-4e97-9578-b054701fb85e tempest-ServerPasswordTestJSON-1337937232 tempest-ServerPasswordTestJSON-1337937232-project-member] Lock "b0d16bba-c6f9-4f8e-8fe0-bd3d42ad154c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 199.252s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1401.503747] env[68443]: DEBUG nova.compute.manager [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] [instance: a4708485-db53-416e-94be-f9a017eb28c4] Starting instance... 
{{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1401.558407] env[68443]: DEBUG oslo_concurrency.lockutils [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1401.558657] env[68443]: DEBUG oslo_concurrency.lockutils [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1401.560263] env[68443]: INFO nova.compute.claims [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] [instance: a4708485-db53-416e-94be-f9a017eb28c4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1401.818833] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8b1e9ac-8f90-461f-8fc5-887dabbc3963 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.826704] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12d82273-add0-42c4-a3ff-393d356b0bc0 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.856478] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a2807d5-6ab2-49ec-bdc2-37f8dae3fafd {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.863806] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e55fcb1f-a000-4bfa-9d46-7da036c9c609 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.878098] env[68443]: DEBUG nova.compute.provider_tree [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1401.886637] env[68443]: DEBUG nova.scheduler.client.report [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1401.900450] env[68443]: DEBUG oslo_concurrency.lockutils 
[None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.342s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1401.900961] env[68443]: DEBUG nova.compute.manager [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] [instance: a4708485-db53-416e-94be-f9a017eb28c4] Start building networks asynchronously for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1401.933130] env[68443]: DEBUG nova.compute.utils [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Using /dev/sd instead of None {{(pid=68443) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1401.934869] env[68443]: DEBUG nova.compute.manager [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] [instance: a4708485-db53-416e-94be-f9a017eb28c4] Allocating IP information in the background. {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1401.935071] env[68443]: DEBUG nova.network.neutron [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] [instance: a4708485-db53-416e-94be-f9a017eb28c4] allocate_for_instance() {{(pid=68443) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1401.943495] env[68443]: DEBUG nova.compute.manager [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] [instance: a4708485-db53-416e-94be-f9a017eb28c4] Start building block device mappings for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1401.992954] env[68443]: DEBUG nova.policy [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bad0d9d5f6584f70be82b6e158d6b08a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1bdb4dff8cab48b0b4b31b3fa4e944cd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68443) authorize /opt/stack/nova/nova/policy.py:203}} [ 1402.018216] env[68443]: DEBUG nova.compute.manager [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] [instance: a4708485-db53-416e-94be-f9a017eb28c4] Start spawning the instance on the hypervisor. 
{{(pid=68443) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1402.044201] env[68443]: DEBUG nova.virt.hardware [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-08T18:48:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-08T18:48:12Z,direct_url=,disk_format='vmdk',id=a80a8b97-4d56-4702-9b02-9d115fcd6710,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f4f2a1e220914ec3b281775c224df247',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-08T18:48:13Z,virtual_size=,visibility=), allow threads: False {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1402.044465] env[68443]: DEBUG nova.virt.hardware [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Flavor limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1402.044624] env[68443]: DEBUG nova.virt.hardware [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Image limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1402.044806] env[68443]: DEBUG nova.virt.hardware [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Flavor pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1402.044953] env[68443]: DEBUG nova.virt.hardware [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Image pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1402.045117] env[68443]: DEBUG nova.virt.hardware [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1402.045402] env[68443]: DEBUG nova.virt.hardware [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1402.045482] env[68443]: DEBUG nova.virt.hardware [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1402.045645] env[68443]: DEBUG 
nova.virt.hardware [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Got 1 possible topologies {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1402.045809] env[68443]: DEBUG nova.virt.hardware [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1402.045984] env[68443]: DEBUG nova.virt.hardware [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1402.046867] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db173a9d-f6fb-43cf-ae3a-ee2171596bce {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.054768] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e710667f-4087-4e7c-aad6-87608c891b23 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.314962] env[68443]: DEBUG nova.network.neutron [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] [instance: a4708485-db53-416e-94be-f9a017eb28c4] Successfully created port: 19a596e6-852f-481c-8d7a-6602b2508816 {{(pid=68443) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1402.927102] env[68443]: DEBUG nova.network.neutron [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] [instance: a4708485-db53-416e-94be-f9a017eb28c4] Successfully updated port: 19a596e6-852f-481c-8d7a-6602b2508816 {{(pid=68443) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1402.940180] env[68443]: DEBUG oslo_concurrency.lockutils [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Acquiring lock "refresh_cache-a4708485-db53-416e-94be-f9a017eb28c4" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1402.940323] env[68443]: DEBUG oslo_concurrency.lockutils [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Acquired lock "refresh_cache-a4708485-db53-416e-94be-f9a017eb28c4" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1402.940641] env[68443]: DEBUG nova.network.neutron [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] [instance: a4708485-db53-416e-94be-f9a017eb28c4] Building network info cache for instance {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1402.982897] env[68443]: DEBUG nova.network.neutron [None 
req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] [instance: a4708485-db53-416e-94be-f9a017eb28c4] Instance cache missing network info. {{(pid=68443) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1403.120871] env[68443]: DEBUG nova.compute.manager [req-d5901cf5-d298-459c-b674-3cc239684289 req-aaaae769-b13b-4af8-8fb1-7f80a0f2f9b4 service nova] [instance: a4708485-db53-416e-94be-f9a017eb28c4] Received event network-vif-plugged-19a596e6-852f-481c-8d7a-6602b2508816 {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1403.121165] env[68443]: DEBUG oslo_concurrency.lockutils [req-d5901cf5-d298-459c-b674-3cc239684289 req-aaaae769-b13b-4af8-8fb1-7f80a0f2f9b4 service nova] Acquiring lock "a4708485-db53-416e-94be-f9a017eb28c4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1403.121329] env[68443]: DEBUG oslo_concurrency.lockutils [req-d5901cf5-d298-459c-b674-3cc239684289 req-aaaae769-b13b-4af8-8fb1-7f80a0f2f9b4 service nova] Lock "a4708485-db53-416e-94be-f9a017eb28c4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1403.121497] env[68443]: DEBUG oslo_concurrency.lockutils [req-d5901cf5-d298-459c-b674-3cc239684289 req-aaaae769-b13b-4af8-8fb1-7f80a0f2f9b4 service nova] Lock "a4708485-db53-416e-94be-f9a017eb28c4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1403.121659] env[68443]: DEBUG nova.compute.manager [req-d5901cf5-d298-459c-b674-3cc239684289 req-aaaae769-b13b-4af8-8fb1-7f80a0f2f9b4 service nova] [instance: a4708485-db53-416e-94be-f9a017eb28c4] No waiting events found dispatching network-vif-plugged-19a596e6-852f-481c-8d7a-6602b2508816 {{(pid=68443) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1403.121819] env[68443]: WARNING nova.compute.manager [req-d5901cf5-d298-459c-b674-3cc239684289 req-aaaae769-b13b-4af8-8fb1-7f80a0f2f9b4 service nova] [instance: a4708485-db53-416e-94be-f9a017eb28c4] Received unexpected event network-vif-plugged-19a596e6-852f-481c-8d7a-6602b2508816 for instance with vm_state building and task_state spawning. [ 1403.123015] env[68443]: DEBUG nova.compute.manager [req-d5901cf5-d298-459c-b674-3cc239684289 req-aaaae769-b13b-4af8-8fb1-7f80a0f2f9b4 service nova] [instance: a4708485-db53-416e-94be-f9a017eb28c4] Received event network-changed-19a596e6-852f-481c-8d7a-6602b2508816 {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1403.123015] env[68443]: DEBUG nova.compute.manager [req-d5901cf5-d298-459c-b674-3cc239684289 req-aaaae769-b13b-4af8-8fb1-7f80a0f2f9b4 service nova] [instance: a4708485-db53-416e-94be-f9a017eb28c4] Refreshing instance network info cache due to event network-changed-19a596e6-852f-481c-8d7a-6602b2508816. 
{{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1403.123015] env[68443]: DEBUG oslo_concurrency.lockutils [req-d5901cf5-d298-459c-b674-3cc239684289 req-aaaae769-b13b-4af8-8fb1-7f80a0f2f9b4 service nova] Acquiring lock "refresh_cache-a4708485-db53-416e-94be-f9a017eb28c4" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1403.156917] env[68443]: DEBUG nova.network.neutron [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] [instance: a4708485-db53-416e-94be-f9a017eb28c4] Updating instance_info_cache with network_info: [{"id": "19a596e6-852f-481c-8d7a-6602b2508816", "address": "fa:16:3e:d4:91:8a", "network": {"id": "fbec3f9a-8f0c-46a7-9bfb-182a3316dd92", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1596584816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1bdb4dff8cab48b0b4b31b3fa4e944cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c20f5114-0866-45b3-9a7c-62f113ff83fa", "external-id": "nsx-vlan-transportzone-47", "segmentation_id": 47, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19a596e6-85", "ovs_interfaceid": "19a596e6-852f-481c-8d7a-6602b2508816", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1403.172127] env[68443]: DEBUG oslo_concurrency.lockutils [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Releasing lock "refresh_cache-a4708485-db53-416e-94be-f9a017eb28c4" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1403.172681] env[68443]: DEBUG nova.compute.manager [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] [instance: a4708485-db53-416e-94be-f9a017eb28c4] Instance network_info: |[{"id": "19a596e6-852f-481c-8d7a-6602b2508816", "address": "fa:16:3e:d4:91:8a", "network": {"id": "fbec3f9a-8f0c-46a7-9bfb-182a3316dd92", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1596584816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1bdb4dff8cab48b0b4b31b3fa4e944cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c20f5114-0866-45b3-9a7c-62f113ff83fa", "external-id": "nsx-vlan-transportzone-47", "segmentation_id": 47, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tap19a596e6-85", "ovs_interfaceid": "19a596e6-852f-481c-8d7a-6602b2508816", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1403.173103] env[68443]: DEBUG oslo_concurrency.lockutils [req-d5901cf5-d298-459c-b674-3cc239684289 req-aaaae769-b13b-4af8-8fb1-7f80a0f2f9b4 service nova] Acquired lock "refresh_cache-a4708485-db53-416e-94be-f9a017eb28c4" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1403.173356] env[68443]: DEBUG nova.network.neutron [req-d5901cf5-d298-459c-b674-3cc239684289 req-aaaae769-b13b-4af8-8fb1-7f80a0f2f9b4 service nova] [instance: a4708485-db53-416e-94be-f9a017eb28c4] Refreshing network info cache for port 19a596e6-852f-481c-8d7a-6602b2508816 {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1403.174977] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] [instance: a4708485-db53-416e-94be-f9a017eb28c4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d4:91:8a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c20f5114-0866-45b3-9a7c-62f113ff83fa', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '19a596e6-852f-481c-8d7a-6602b2508816', 'vif_model': 'vmxnet3'}] {{(pid=68443) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1403.182930] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Creating folder: Project (1bdb4dff8cab48b0b4b31b3fa4e944cd). Parent ref: group-v673136. {{(pid=68443) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1403.183942] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ff60c9f7-fde0-4543-af4f-44c69384fef5 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.198027] env[68443]: INFO nova.virt.vmwareapi.vm_util [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Created folder: Project (1bdb4dff8cab48b0b4b31b3fa4e944cd) in parent group-v673136. [ 1403.198027] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Creating folder: Instances. Parent ref: group-v673210. {{(pid=68443) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1403.198027] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-70243314-f800-4423-b2ee-ae7609e1586d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.206199] env[68443]: INFO nova.virt.vmwareapi.vm_util [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Created folder: Instances in parent group-v673210. 
[ 1403.206491] env[68443]: DEBUG oslo.service.loopingcall [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1403.206723] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a4708485-db53-416e-94be-f9a017eb28c4] Creating VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1403.206971] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-31724bcf-d08a-44a3-9cb5-025aaa029983 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.228678] env[68443]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1403.228678] env[68443]: value = "task-3374005" [ 1403.228678] env[68443]: _type = "Task" [ 1403.228678] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1403.236216] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3374005, 'name': CreateVM_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.448370] env[68443]: DEBUG nova.network.neutron [req-d5901cf5-d298-459c-b674-3cc239684289 req-aaaae769-b13b-4af8-8fb1-7f80a0f2f9b4 service nova] [instance: a4708485-db53-416e-94be-f9a017eb28c4] Updated VIF entry in instance network info cache for port 19a596e6-852f-481c-8d7a-6602b2508816. {{(pid=68443) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1403.448718] env[68443]: DEBUG nova.network.neutron [req-d5901cf5-d298-459c-b674-3cc239684289 req-aaaae769-b13b-4af8-8fb1-7f80a0f2f9b4 service nova] [instance: a4708485-db53-416e-94be-f9a017eb28c4] Updating instance_info_cache with network_info: [{"id": "19a596e6-852f-481c-8d7a-6602b2508816", "address": "fa:16:3e:d4:91:8a", "network": {"id": "fbec3f9a-8f0c-46a7-9bfb-182a3316dd92", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1596584816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1bdb4dff8cab48b0b4b31b3fa4e944cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c20f5114-0866-45b3-9a7c-62f113ff83fa", "external-id": "nsx-vlan-transportzone-47", "segmentation_id": 47, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19a596e6-85", "ovs_interfaceid": "19a596e6-852f-481c-8d7a-6602b2508816", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1403.460041] env[68443]: DEBUG oslo_concurrency.lockutils [req-d5901cf5-d298-459c-b674-3cc239684289 req-aaaae769-b13b-4af8-8fb1-7f80a0f2f9b4 service nova] Releasing lock "refresh_cache-a4708485-db53-416e-94be-f9a017eb28c4" {{(pid=68443) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1403.739504] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3374005, 'name': CreateVM_Task, 'duration_secs': 0.317675} completed successfully. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1403.739659] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a4708485-db53-416e-94be-f9a017eb28c4] Created VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1403.740261] env[68443]: DEBUG oslo_concurrency.lockutils [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1403.740436] env[68443]: DEBUG oslo_concurrency.lockutils [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1403.740714] env[68443]: DEBUG oslo_concurrency.lockutils [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1403.740957] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f30dc4b-b161-4006-8207-99831a6b788c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.745138] env[68443]: DEBUG oslo_vmware.api [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Waiting for the task: (returnval){ [ 1403.745138] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]52688e62-3ab3-a18a-1a0e-7617b2e8feaa" [ 1403.745138] env[68443]: _type = "Task" [ 1403.745138] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1403.752213] env[68443]: DEBUG oslo_vmware.api [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]52688e62-3ab3-a18a-1a0e-7617b2e8feaa, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.255538] env[68443]: DEBUG oslo_concurrency.lockutils [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1404.255852] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] [instance: a4708485-db53-416e-94be-f9a017eb28c4] Processing image a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1404.255994] env[68443]: DEBUG oslo_concurrency.lockutils [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1408.842254] env[68443]: DEBUG oslo_concurrency.lockutils [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Acquiring lock "04fc458a-a928-43ef-8fd0-bfc49989d2b1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1408.842586] env[68443]: DEBUG oslo_concurrency.lockutils [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Lock "04fc458a-a928-43ef-8fd0-bfc49989d2b1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1409.318999] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4ac13cc1-72b4-4092-a436-f050df7799a5 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Acquiring lock "a4708485-db53-416e-94be-f9a017eb28c4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1444.826467] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1444.826757] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1447.825368] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68443) 
run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1448.103367] env[68443]: WARNING oslo_vmware.rw_handles [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1448.103367] env[68443]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1448.103367] env[68443]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1448.103367] env[68443]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1448.103367] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1448.103367] env[68443]: ERROR oslo_vmware.rw_handles response.begin() [ 1448.103367] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1448.103367] env[68443]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1448.103367] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1448.103367] env[68443]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1448.103367] env[68443]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1448.103367] env[68443]: ERROR oslo_vmware.rw_handles [ 1448.105255] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Downloaded image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to vmware_temp/44273f26-c683-4ed0-93c1-6ca75c50ef83/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1448.106067] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Caching image {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1448.106305] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Copying Virtual Disk [datastore1] vmware_temp/44273f26-c683-4ed0-93c1-6ca75c50ef83/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk to [datastore1] vmware_temp/44273f26-c683-4ed0-93c1-6ca75c50ef83/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk {{(pid=68443) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1448.106584] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-40b1ab7d-1aff-485f-bc68-9412d38b4f5c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.115400] env[68443]: DEBUG oslo_vmware.api [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 
tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Waiting for the task: (returnval){ [ 1448.115400] env[68443]: value = "task-3374006" [ 1448.115400] env[68443]: _type = "Task" [ 1448.115400] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1448.123676] env[68443]: DEBUG oslo_vmware.api [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Task: {'id': task-3374006, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1448.625994] env[68443]: DEBUG oslo_vmware.exceptions [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Fault InvalidArgument not matched. {{(pid=68443) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1448.626360] env[68443]: DEBUG oslo_concurrency.lockutils [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1448.627015] env[68443]: ERROR nova.compute.manager [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1448.627015] env[68443]: Faults: ['InvalidArgument'] [ 1448.627015] env[68443]: ERROR nova.compute.manager [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Traceback (most recent call last): [ 1448.627015] env[68443]: ERROR nova.compute.manager [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1448.627015] env[68443]: ERROR nova.compute.manager [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] yield resources [ 1448.627015] env[68443]: ERROR nova.compute.manager [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1448.627015] env[68443]: ERROR nova.compute.manager [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] self.driver.spawn(context, instance, image_meta, [ 1448.627015] env[68443]: ERROR nova.compute.manager [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1448.627015] env[68443]: ERROR nova.compute.manager [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1448.627015] env[68443]: ERROR nova.compute.manager [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1448.627015] env[68443]: ERROR nova.compute.manager [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] self._fetch_image_if_missing(context, vi) [ 1448.627015] env[68443]: ERROR nova.compute.manager 
[instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1448.627326] env[68443]: ERROR nova.compute.manager [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] image_cache(vi, tmp_image_ds_loc) [ 1448.627326] env[68443]: ERROR nova.compute.manager [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1448.627326] env[68443]: ERROR nova.compute.manager [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] vm_util.copy_virtual_disk( [ 1448.627326] env[68443]: ERROR nova.compute.manager [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1448.627326] env[68443]: ERROR nova.compute.manager [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] session._wait_for_task(vmdk_copy_task) [ 1448.627326] env[68443]: ERROR nova.compute.manager [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1448.627326] env[68443]: ERROR nova.compute.manager [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] return self.wait_for_task(task_ref) [ 1448.627326] env[68443]: ERROR nova.compute.manager [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1448.627326] env[68443]: ERROR nova.compute.manager [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] return evt.wait() [ 1448.627326] env[68443]: ERROR nova.compute.manager [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1448.627326] env[68443]: ERROR nova.compute.manager [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] result = hub.switch() [ 1448.627326] env[68443]: ERROR nova.compute.manager [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1448.627326] env[68443]: ERROR nova.compute.manager [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] return self.greenlet.switch() [ 1448.627648] env[68443]: ERROR nova.compute.manager [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1448.627648] env[68443]: ERROR nova.compute.manager [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] self.f(*self.args, **self.kw) [ 1448.627648] env[68443]: ERROR nova.compute.manager [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1448.627648] env[68443]: ERROR nova.compute.manager [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] raise exceptions.translate_fault(task_info.error) [ 1448.627648] env[68443]: ERROR nova.compute.manager [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1448.627648] env[68443]: ERROR nova.compute.manager [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Faults: ['InvalidArgument'] [ 1448.627648] env[68443]: ERROR nova.compute.manager [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] [ 1448.627648] env[68443]: INFO nova.compute.manager [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 
tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Terminating instance [ 1448.629109] env[68443]: DEBUG oslo_concurrency.lockutils [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1448.629359] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1448.629623] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-42083fbd-366d-420d-a935-2171b235231b {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.632169] env[68443]: DEBUG nova.compute.manager [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1448.632399] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1448.633184] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-007b7e70-e1ba-4956-8a80-b63c3d007fed {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.640190] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Unregistering the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1448.641205] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-27f93b17-b49f-4bfc-9709-e1d29b93d26b {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.642658] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1448.642853] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68443) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1448.643593] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dcb82af3-70f0-4967-9aef-1fc79972139d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.648968] env[68443]: DEBUG oslo_vmware.api [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Waiting for the task: (returnval){ [ 1448.648968] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]52bc4518-8cd1-40f6-ed38-dd7ff883b3e8" [ 1448.648968] env[68443]: _type = "Task" [ 1448.648968] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1448.657161] env[68443]: DEBUG oslo_vmware.api [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]52bc4518-8cd1-40f6-ed38-dd7ff883b3e8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1448.718014] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Unregistered the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1448.718178] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Deleting contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1448.718361] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Deleting the datastore file [datastore1] 1c1acc0d-263d-4687-93ff-291d18a592d8 {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1448.718618] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9a90b4c5-6d5f-4400-8fa4-db83211042c8 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.724804] env[68443]: DEBUG oslo_vmware.api [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Waiting for the task: (returnval){ [ 1448.724804] env[68443]: value = "task-3374008" [ 1448.724804] env[68443]: _type = "Task" [ 1448.724804] env[68443]: } to complete. 
{{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1448.733245] env[68443]: DEBUG oslo_vmware.api [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Task: {'id': task-3374008, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1448.824996] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1448.825200] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Starting heal instance info cache {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1448.825366] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Rebuilding the list of instances to heal {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1448.850041] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1448.850372] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1448.850372] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 3842d98e-d971-456c-b287-53c513285acf] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1448.850437] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1448.850557] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1448.850679] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1448.850801] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Skipping network cache update for instance because it is Building. 
{{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1448.850935] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1448.851100] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1448.851230] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: a4708485-db53-416e-94be-f9a017eb28c4] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1448.851350] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Didn't find any instances for network info cache update. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1448.851836] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1448.852038] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1448.852182] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68443) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1449.161065] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Preparing fetch location {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1449.161065] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Creating directory with path [datastore1] vmware_temp/5a8b623c-3255-4aec-8184-47ab2c44b974/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1449.161065] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-43c18a13-e257-4c24-8d58-0ac6bf5134db {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.172344] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Created directory with path [datastore1] vmware_temp/5a8b623c-3255-4aec-8184-47ab2c44b974/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1449.172547] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Fetch image to [datastore1] vmware_temp/5a8b623c-3255-4aec-8184-47ab2c44b974/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1449.172707] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to [datastore1] vmware_temp/5a8b623c-3255-4aec-8184-47ab2c44b974/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1449.173574] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a243447-e917-493e-ba5d-99ca6901c1a3 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.179874] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7fa19b0-50ec-4fd7-a24e-ab711491d9d6 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.188918] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-854f066a-8491-406b-a973-2b50dab1f837 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.219015] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-703188a7-73ec-4479-a2d0-e73282822880 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.224993] env[68443]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-8ad70bc2-d65b-4f1d-887b-d430e17a8359 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.234194] env[68443]: DEBUG oslo_vmware.api [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Task: {'id': task-3374008, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.066845} completed successfully. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1449.234396] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Deleted the datastore file {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1449.234578] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Deleted contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1449.234749] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1449.234923] env[68443]: INFO nova.compute.manager [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Took 0.60 seconds to destroy the instance on the hypervisor. 
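The CopyVirtualDisk failure above follows the task-polling pattern visible in the traceback: vm_util.copy_virtual_disk starts a CopyVirtualDisk_Task, the session's wait_for_task polls it, and when the task ends in error _poll_task raises a translated fault (here VimFaultException: "A specified parameter was not correct: fileType", Faults: ['InvalidArgument']), which the compute manager then handles by destroying the half-built instance. The sketch below is a minimal, self-contained illustration of that poll-and-translate loop only; TaskInfo, VimFaultError, and the simulated polls are stand-ins for this example, not the oslo.vmware or Nova API.

```python
import time
from dataclasses import dataclass, field
from typing import List, Optional


class VimFaultError(Exception):
    """Stand-in for oslo_vmware.exceptions.VimFaultException (illustrative only)."""

    def __init__(self, msg: str, fault_list: Optional[List[str]] = None):
        super().__init__(msg)
        self.fault_list = fault_list or []


@dataclass
class TaskInfo:
    """Minimal mock of a vSphere task-info record (hypothetical shape)."""
    state: str = "running"          # running | success | error
    progress: int = 0
    error_msg: str = ""
    faults: List[str] = field(default_factory=list)


def wait_for_task(poll_task, interval: float = 0.5) -> TaskInfo:
    """Poll a task until it finishes, mirroring the loop the log shows.

    `poll_task` is any callable returning the current TaskInfo. While the task
    runs, progress is logged (the log above prints "progress is 0%"); on error
    the task's fault is translated into an exception, which is what surfaces as
    "A specified parameter was not correct: fileType / Faults: ['InvalidArgument']".
    """
    while True:
        info = poll_task()
        if info.state == "success":
            return info
        if info.state == "error":
            # Analogous to: raise exceptions.translate_fault(task_info.error)
            raise VimFaultError(info.error_msg, info.faults)
        print(f"task progress is {info.progress}%")
        time.sleep(interval)


if __name__ == "__main__":
    # Simulate a copy task that fails with InvalidArgument on the second poll.
    polls = iter([
        TaskInfo(state="running", progress=0),
        TaskInfo(state="error",
                 error_msg="A specified parameter was not correct: fileType",
                 faults=["InvalidArgument"]),
    ])
    try:
        wait_for_task(lambda: next(polls), interval=0.01)
    except VimFaultError as exc:
        print(f"spawn failed: {exc} Faults: {exc.fault_list}")
```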
[ 1449.237010] env[68443]: DEBUG nova.compute.claims [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Aborting claim: {{(pid=68443) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1449.237208] env[68443]: DEBUG oslo_concurrency.lockutils [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1449.237419] env[68443]: DEBUG oslo_concurrency.lockutils [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1449.248250] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1449.450075] env[68443]: DEBUG oslo_concurrency.lockutils [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1449.451717] env[68443]: ERROR nova.compute.manager [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Instance failed to spawn: nova.exception.ImageNotAuthorized: Not authorized for image a80a8b97-4d56-4702-9b02-9d115fcd6710. 
[ 1449.451717] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Traceback (most recent call last): [ 1449.451717] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1449.451717] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1449.451717] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1449.451717] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] result = getattr(controller, method)(*args, **kwargs) [ 1449.451717] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1449.451717] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] return self._get(image_id) [ 1449.451717] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1449.451717] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1449.451717] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1449.452108] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] resp, body = self.http_client.get(url, headers=header) [ 1449.452108] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1449.452108] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] return self.request(url, 'GET', **kwargs) [ 1449.452108] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1449.452108] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] return self._handle_response(resp) [ 1449.452108] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1449.452108] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] raise exc.from_response(resp, resp.content) [ 1449.452108] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1449.452108] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] [ 1449.452108] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] During handling of the above exception, another exception occurred: [ 1449.452108] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] [ 1449.452108] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Traceback (most recent call last): [ 1449.452374] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1449.452374] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] yield resources [ 1449.452374] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1449.452374] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] self.driver.spawn(context, instance, image_meta, [ 1449.452374] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1449.452374] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1449.452374] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1449.452374] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] self._fetch_image_if_missing(context, vi) [ 1449.452374] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1449.452374] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] image_fetch(context, vi, tmp_image_ds_loc) [ 1449.452374] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1449.452374] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] images.fetch_image( [ 1449.452374] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1449.452642] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] metadata = IMAGE_API.get(context, image_ref) [ 1449.452642] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1449.452642] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] return session.show(context, image_id, [ 1449.452642] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1449.452642] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] _reraise_translated_image_exception(image_id) [ 1449.452642] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File 
"/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1449.452642] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] raise new_exc.with_traceback(exc_trace) [ 1449.452642] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1449.452642] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1449.452642] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1449.452642] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] result = getattr(controller, method)(*args, **kwargs) [ 1449.452642] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1449.452642] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] return self._get(image_id) [ 1449.452976] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1449.452976] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1449.452976] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1449.452976] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] resp, body = self.http_client.get(url, headers=header) [ 1449.452976] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1449.452976] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] return self.request(url, 'GET', **kwargs) [ 1449.452976] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1449.452976] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] return self._handle_response(resp) [ 1449.452976] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1449.452976] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] raise exc.from_response(resp, resp.content) [ 1449.452976] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] nova.exception.ImageNotAuthorized: Not authorized for image a80a8b97-4d56-4702-9b02-9d115fcd6710. 
[ 1449.452976] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] [ 1449.453302] env[68443]: INFO nova.compute.manager [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Terminating instance [ 1449.453972] env[68443]: DEBUG oslo_concurrency.lockutils [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1449.453972] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1449.457353] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e760fcf4-b6f5-45cd-8809-7cdb56c44bb5 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.459018] env[68443]: DEBUG nova.compute.manager [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1449.459231] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1449.460162] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faba358b-0852-417c-bed4-a1bd52a1a014 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.467456] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Unregistering the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1449.467702] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-586c886f-2fee-4739-ba87-17fa97d39141 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.470805] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1449.471134] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 
tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68443) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1449.475797] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e11b4e5c-940e-46f3-bb7f-ca54b51469fa {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.483270] env[68443]: DEBUG oslo_vmware.api [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Waiting for the task: (returnval){ [ 1449.483270] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]52c64d09-1437-0178-4890-ba95eb3bd192" [ 1449.483270] env[68443]: _type = "Task" [ 1449.483270] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1449.490906] env[68443]: DEBUG oslo_vmware.api [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]52c64d09-1437-0178-4890-ba95eb3bd192, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1449.541790] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Unregistered the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1449.542022] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Deleting contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1449.542212] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Deleting the datastore file [datastore1] 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5 {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1449.542478] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-899209f8-e32d-4486-9072-31c49b63a9e3 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.548838] env[68443]: DEBUG oslo_vmware.api [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Waiting for the task: (returnval){ [ 1449.548838] env[68443]: value = "task-3374010" [ 1449.548838] env[68443]: _type = "Task" [ 1449.548838] env[68443]: } to complete. 
{{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1449.560039] env[68443]: DEBUG oslo_vmware.api [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Task: {'id': task-3374010, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1449.576494] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a4201de-cc65-4cf3-864e-b6fc85ba6b02 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.583344] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1662fb4f-509e-48cc-95fe-5addbb061b46 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.614133] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65ae0094-d51f-438a-ae34-c5d9939261c1 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.621474] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e78fe90-401a-4dde-b40a-27fd0ce5a809 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.636353] env[68443]: DEBUG nova.compute.provider_tree [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1449.646030] env[68443]: DEBUG nova.scheduler.client.report [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1449.665178] env[68443]: DEBUG oslo_concurrency.lockutils [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.428s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1449.665754] env[68443]: ERROR nova.compute.manager [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A 
specified parameter was not correct: fileType [ 1449.665754] env[68443]: Faults: ['InvalidArgument'] [ 1449.665754] env[68443]: ERROR nova.compute.manager [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Traceback (most recent call last): [ 1449.665754] env[68443]: ERROR nova.compute.manager [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1449.665754] env[68443]: ERROR nova.compute.manager [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] self.driver.spawn(context, instance, image_meta, [ 1449.665754] env[68443]: ERROR nova.compute.manager [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1449.665754] env[68443]: ERROR nova.compute.manager [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1449.665754] env[68443]: ERROR nova.compute.manager [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1449.665754] env[68443]: ERROR nova.compute.manager [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] self._fetch_image_if_missing(context, vi) [ 1449.665754] env[68443]: ERROR nova.compute.manager [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1449.665754] env[68443]: ERROR nova.compute.manager [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] image_cache(vi, tmp_image_ds_loc) [ 1449.665754] env[68443]: ERROR nova.compute.manager [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1449.666323] env[68443]: ERROR nova.compute.manager [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] vm_util.copy_virtual_disk( [ 1449.666323] env[68443]: ERROR nova.compute.manager [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1449.666323] env[68443]: ERROR nova.compute.manager [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] session._wait_for_task(vmdk_copy_task) [ 1449.666323] env[68443]: ERROR nova.compute.manager [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1449.666323] env[68443]: ERROR nova.compute.manager [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] return self.wait_for_task(task_ref) [ 1449.666323] env[68443]: ERROR nova.compute.manager [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1449.666323] env[68443]: ERROR nova.compute.manager [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] return evt.wait() [ 1449.666323] env[68443]: ERROR nova.compute.manager [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1449.666323] env[68443]: ERROR nova.compute.manager [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] result = hub.switch() [ 1449.666323] env[68443]: ERROR nova.compute.manager [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1449.666323] env[68443]: ERROR nova.compute.manager [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] return self.greenlet.switch() [ 1449.666323] 
env[68443]: ERROR nova.compute.manager [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1449.666323] env[68443]: ERROR nova.compute.manager [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] self.f(*self.args, **self.kw) [ 1449.666660] env[68443]: ERROR nova.compute.manager [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1449.666660] env[68443]: ERROR nova.compute.manager [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] raise exceptions.translate_fault(task_info.error) [ 1449.666660] env[68443]: ERROR nova.compute.manager [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1449.666660] env[68443]: ERROR nova.compute.manager [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Faults: ['InvalidArgument'] [ 1449.666660] env[68443]: ERROR nova.compute.manager [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] [ 1449.666660] env[68443]: DEBUG nova.compute.utils [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] VimFaultException {{(pid=68443) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1449.668099] env[68443]: DEBUG nova.compute.manager [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Build of instance 1c1acc0d-263d-4687-93ff-291d18a592d8 was re-scheduled: A specified parameter was not correct: fileType [ 1449.668099] env[68443]: Faults: ['InvalidArgument'] {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1449.668479] env[68443]: DEBUG nova.compute.manager [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Unplugging VIFs for instance {{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1449.668655] env[68443]: DEBUG nova.compute.manager [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1449.668827] env[68443]: DEBUG nova.compute.manager [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1449.669018] env[68443]: DEBUG nova.network.neutron [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1449.980567] env[68443]: DEBUG nova.network.neutron [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1449.991962] env[68443]: INFO nova.compute.manager [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Took 0.32 seconds to deallocate network for instance. [ 1450.001823] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 3842d98e-d971-456c-b287-53c513285acf] Preparing fetch location {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1450.001823] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Creating directory with path [datastore1] vmware_temp/5084bae1-821e-40c8-b911-802e9b8483fe/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1450.001823] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ed6ce949-4708-4477-b180-29fcfea7623b {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.012218] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Created directory with path [datastore1] vmware_temp/5084bae1-821e-40c8-b911-802e9b8483fe/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1450.012423] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 3842d98e-d971-456c-b287-53c513285acf] Fetch image to [datastore1] vmware_temp/5084bae1-821e-40c8-b911-802e9b8483fe/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1450.012679] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None 
req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 3842d98e-d971-456c-b287-53c513285acf] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to [datastore1] vmware_temp/5084bae1-821e-40c8-b911-802e9b8483fe/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1450.013369] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27821019-eae5-41c8-8117-21846e30bb85 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.020309] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6a72129-f9fd-4c25-b591-e5fa1fb96d3d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.031721] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d35c9a7-8e6d-4a3a-972d-3abd1f62f5d3 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.069849] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02e631c5-7ee4-428f-b25f-b89d1f0352d9 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.084731] env[68443]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-6e045db8-77cd-4d12-a251-6b346fcbca5f {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.086584] env[68443]: DEBUG oslo_vmware.api [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Task: {'id': task-3374010, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077258} completed successfully. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1450.087186] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Deleted the datastore file {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1450.087388] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Deleted contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1450.087568] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1450.087821] env[68443]: INFO nova.compute.manager [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Took 0.63 seconds to destroy the instance on the hypervisor. [ 1450.089894] env[68443]: DEBUG nova.compute.claims [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Aborting claim: {{(pid=68443) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1450.090083] env[68443]: DEBUG oslo_concurrency.lockutils [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1450.090306] env[68443]: DEBUG oslo_concurrency.lockutils [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1450.103589] env[68443]: INFO nova.scheduler.client.report [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Deleted allocations for instance 1c1acc0d-263d-4687-93ff-291d18a592d8 [ 1450.112817] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 3842d98e-d971-456c-b287-53c513285acf] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1450.142067] env[68443]: DEBUG oslo_concurrency.lockutils [None req-0cc7c51e-fc0a-4d20-ac93-8bcba64c3ae1 
tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Lock "1c1acc0d-263d-4687-93ff-291d18a592d8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 689.102s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1450.145894] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c3fd19d0-26dd-4b93-bc80-53084f8b513d tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Lock "1c1acc0d-263d-4687-93ff-291d18a592d8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 490.326s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1450.145894] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c3fd19d0-26dd-4b93-bc80-53084f8b513d tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Acquiring lock "1c1acc0d-263d-4687-93ff-291d18a592d8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1450.145894] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c3fd19d0-26dd-4b93-bc80-53084f8b513d tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Lock "1c1acc0d-263d-4687-93ff-291d18a592d8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1450.146108] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c3fd19d0-26dd-4b93-bc80-53084f8b513d tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Lock "1c1acc0d-263d-4687-93ff-291d18a592d8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1450.146186] env[68443]: INFO nova.compute.manager [None req-c3fd19d0-26dd-4b93-bc80-53084f8b513d tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Terminating instance [ 1450.155044] env[68443]: DEBUG nova.compute.manager [None req-c3fd19d0-26dd-4b93-bc80-53084f8b513d tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Start destroying the instance on the hypervisor. 
{{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1450.155206] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-c3fd19d0-26dd-4b93-bc80-53084f8b513d tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1450.155748] env[68443]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-812536ef-9d58-4e69-9de7-d33b4d72e767 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.159407] env[68443]: DEBUG nova.compute.manager [None req-90bb6610-682b-45c5-bf6c-7e16e59ac82d tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] [instance: 6817aec7-2b56-4a82-ad46-e1957588a8a4] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1450.169025] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10a7d04b-8248-4891-9d63-237c42c64174 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.187872] env[68443]: DEBUG nova.compute.manager [None req-90bb6610-682b-45c5-bf6c-7e16e59ac82d tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] [instance: 6817aec7-2b56-4a82-ad46-e1957588a8a4] Instance disappeared before build. {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1450.200523] env[68443]: WARNING nova.virt.vmwareapi.vmops [None req-c3fd19d0-26dd-4b93-bc80-53084f8b513d tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1c1acc0d-263d-4687-93ff-291d18a592d8 could not be found. [ 1450.200735] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-c3fd19d0-26dd-4b93-bc80-53084f8b513d tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1450.200952] env[68443]: INFO nova.compute.manager [None req-c3fd19d0-26dd-4b93-bc80-53084f8b513d tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1450.201233] env[68443]: DEBUG oslo.service.loopingcall [None req-c3fd19d0-26dd-4b93-bc80-53084f8b513d tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1450.205309] env[68443]: DEBUG nova.compute.manager [-] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1450.205309] env[68443]: DEBUG nova.network.neutron [-] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1450.206375] env[68443]: DEBUG oslo_vmware.rw_handles [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5084bae1-821e-40c8-b911-802e9b8483fe/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68443) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1450.268873] env[68443]: DEBUG oslo_concurrency.lockutils [None req-90bb6610-682b-45c5-bf6c-7e16e59ac82d tempest-ImagesTestJSON-1860039679 tempest-ImagesTestJSON-1860039679-project-member] Lock "6817aec7-2b56-4a82-ad46-e1957588a8a4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 196.806s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1450.274731] env[68443]: DEBUG oslo_vmware.rw_handles [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Completed reading data from the image iterator. {{(pid=68443) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1450.274731] env[68443]: DEBUG oslo_vmware.rw_handles [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5084bae1-821e-40c8-b911-802e9b8483fe/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68443) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1450.277319] env[68443]: DEBUG nova.network.neutron [-] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1450.283900] env[68443]: DEBUG nova.compute.manager [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1450.286862] env[68443]: INFO nova.compute.manager [-] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] Took 0.08 seconds to deallocate network for instance. 
[ 1450.336756] env[68443]: DEBUG oslo_concurrency.lockutils [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1450.392147] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c3fd19d0-26dd-4b93-bc80-53084f8b513d tempest-InstanceActionsNegativeTestJSON-1093471651 tempest-InstanceActionsNegativeTestJSON-1093471651-project-member] Lock "1c1acc0d-263d-4687-93ff-291d18a592d8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.249s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1450.393098] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "1c1acc0d-263d-4687-93ff-291d18a592d8" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 174.300s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1450.393316] env[68443]: INFO nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 1c1acc0d-263d-4687-93ff-291d18a592d8] During sync_power_state the instance has a pending task (deleting). Skip. [ 1450.393500] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "1c1acc0d-263d-4687-93ff-291d18a592d8" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1450.483191] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d8baf3f-b66c-4194-ba56-7ad144050dd0 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.496024] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2df49c2e-9c2d-4124-aa6b-51e13cba86d9 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.532115] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4ca8fd1-0172-460f-9b03-f9557fec4050 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.539579] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a85fb044-b909-4386-acab-5d0a56e1eff2 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.553525] env[68443]: DEBUG nova.compute.provider_tree [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1450.562271] env[68443]: DEBUG nova.scheduler.client.report [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 
tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1450.575963] env[68443]: DEBUG oslo_concurrency.lockutils [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.485s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1450.576538] env[68443]: ERROR nova.compute.manager [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Failed to build and run instance: nova.exception.ImageNotAuthorized: Not authorized for image a80a8b97-4d56-4702-9b02-9d115fcd6710. [ 1450.576538] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Traceback (most recent call last): [ 1450.576538] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1450.576538] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1450.576538] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1450.576538] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] result = getattr(controller, method)(*args, **kwargs) [ 1450.576538] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1450.576538] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] return self._get(image_id) [ 1450.576538] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1450.576538] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1450.576538] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1450.576917] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] resp, body = self.http_client.get(url, headers=header) [ 1450.576917] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 
1450.576917] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] return self.request(url, 'GET', **kwargs) [ 1450.576917] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1450.576917] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] return self._handle_response(resp) [ 1450.576917] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1450.576917] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] raise exc.from_response(resp, resp.content) [ 1450.576917] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. [ 1450.576917] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] [ 1450.576917] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] During handling of the above exception, another exception occurred: [ 1450.576917] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] [ 1450.576917] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Traceback (most recent call last): [ 1450.577173] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1450.577173] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] self.driver.spawn(context, instance, image_meta, [ 1450.577173] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1450.577173] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1450.577173] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1450.577173] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] self._fetch_image_if_missing(context, vi) [ 1450.577173] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1450.577173] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] image_fetch(context, vi, tmp_image_ds_loc) [ 1450.577173] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1450.577173] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] images.fetch_image( [ 1450.577173] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File 
"/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1450.577173] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] metadata = IMAGE_API.get(context, image_ref) [ 1450.577173] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1450.577428] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] return session.show(context, image_id, [ 1450.577428] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1450.577428] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] _reraise_translated_image_exception(image_id) [ 1450.577428] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1450.577428] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] raise new_exc.with_traceback(exc_trace) [ 1450.577428] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1450.577428] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1450.577428] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1450.577428] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] result = getattr(controller, method)(*args, **kwargs) [ 1450.577428] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1450.577428] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] return self._get(image_id) [ 1450.577428] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1450.577428] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1450.577715] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1450.577715] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] resp, body = self.http_client.get(url, headers=header) [ 1450.577715] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1450.577715] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] return self.request(url, 'GET', **kwargs) [ 1450.577715] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1450.577715] env[68443]: ERROR nova.compute.manager [instance: 
1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] return self._handle_response(resp) [ 1450.577715] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1450.577715] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] raise exc.from_response(resp, resp.content) [ 1450.577715] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] nova.exception.ImageNotAuthorized: Not authorized for image a80a8b97-4d56-4702-9b02-9d115fcd6710. [ 1450.577715] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] [ 1450.577921] env[68443]: DEBUG nova.compute.utils [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Not authorized for image a80a8b97-4d56-4702-9b02-9d115fcd6710. {{(pid=68443) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1450.578481] env[68443]: DEBUG oslo_concurrency.lockutils [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.242s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1450.579860] env[68443]: INFO nova.compute.claims [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1450.582484] env[68443]: DEBUG nova.compute.manager [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Build of instance 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5 was re-scheduled: Not authorized for image a80a8b97-4d56-4702-9b02-9d115fcd6710. {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1450.582950] env[68443]: DEBUG nova.compute.manager [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Unplugging VIFs for instance {{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1450.583462] env[68443]: DEBUG nova.compute.manager [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1450.583733] env[68443]: DEBUG nova.compute.manager [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1450.583818] env[68443]: DEBUG nova.network.neutron [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1450.743961] env[68443]: DEBUG neutronclient.v2_0.client [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=68443) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1450.746210] env[68443]: ERROR nova.compute.manager [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Failed to deallocate networks: nova.exception.Unauthorized: Not authorized. [ 1450.746210] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Traceback (most recent call last): [ 1450.746210] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1450.746210] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1450.746210] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1450.746210] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] result = getattr(controller, method)(*args, **kwargs) [ 1450.746210] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1450.746210] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] return self._get(image_id) [ 1450.746210] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1450.746210] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1450.746210] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1450.746656] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] resp, body = self.http_client.get(url, headers=header) [ 1450.746656] env[68443]: ERROR nova.compute.manager [instance: 
1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1450.746656] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] return self.request(url, 'GET', **kwargs) [ 1450.746656] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1450.746656] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] return self._handle_response(resp) [ 1450.746656] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1450.746656] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] raise exc.from_response(resp, resp.content) [ 1450.746656] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. [ 1450.746656] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] [ 1450.746656] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] During handling of the above exception, another exception occurred: [ 1450.746656] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] [ 1450.746656] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Traceback (most recent call last): [ 1450.746953] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1450.746953] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] self.driver.spawn(context, instance, image_meta, [ 1450.746953] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1450.746953] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1450.746953] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1450.746953] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] self._fetch_image_if_missing(context, vi) [ 1450.746953] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1450.746953] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] image_fetch(context, vi, tmp_image_ds_loc) [ 1450.746953] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1450.746953] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] 
images.fetch_image( [ 1450.746953] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1450.746953] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] metadata = IMAGE_API.get(context, image_ref) [ 1450.746953] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1450.747313] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] return session.show(context, image_id, [ 1450.747313] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1450.747313] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] _reraise_translated_image_exception(image_id) [ 1450.747313] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1450.747313] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] raise new_exc.with_traceback(exc_trace) [ 1450.747313] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1450.747313] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1450.747313] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1450.747313] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] result = getattr(controller, method)(*args, **kwargs) [ 1450.747313] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1450.747313] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] return self._get(image_id) [ 1450.747313] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1450.747313] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1450.747627] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1450.747627] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] resp, body = self.http_client.get(url, headers=header) [ 1450.747627] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1450.747627] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] return self.request(url, 'GET', **kwargs) [ 1450.747627] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1450.747627] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] return self._handle_response(resp) [ 1450.747627] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1450.747627] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] raise exc.from_response(resp, resp.content) [ 1450.747627] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] nova.exception.ImageNotAuthorized: Not authorized for image a80a8b97-4d56-4702-9b02-9d115fcd6710. [ 1450.747627] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] [ 1450.747627] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] During handling of the above exception, another exception occurred: [ 1450.747627] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] [ 1450.747627] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Traceback (most recent call last): [ 1450.747926] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/compute/manager.py", line 2447, in _do_build_and_run_instance [ 1450.747926] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] self._build_and_run_instance(context, instance, image, [ 1450.747926] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/compute/manager.py", line 2739, in _build_and_run_instance [ 1450.747926] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] raise exception.RescheduledException( [ 1450.747926] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] nova.exception.RescheduledException: Build of instance 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5 was re-scheduled: Not authorized for image a80a8b97-4d56-4702-9b02-9d115fcd6710. 
[ 1450.747926] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] [ 1450.747926] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] During handling of the above exception, another exception occurred: [ 1450.747926] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] [ 1450.747926] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Traceback (most recent call last): [ 1450.747926] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1450.747926] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] ret = obj(*args, **kwargs) [ 1450.747926] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1450.747926] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] exception_handler_v20(status_code, error_body) [ 1450.748265] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1450.748265] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] raise client_exc(message=error_message, [ 1450.748265] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1450.748265] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Neutron server returns request_ids: ['req-164900fb-4fc1-4757-8e18-6e3b6ec3d41b'] [ 1450.748265] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] [ 1450.748265] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] During handling of the above exception, another exception occurred: [ 1450.748265] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] [ 1450.748265] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Traceback (most recent call last): [ 1450.748265] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/compute/manager.py", line 3036, in _cleanup_allocated_networks [ 1450.748265] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] self._deallocate_network(context, instance, requested_networks) [ 1450.748265] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1450.748265] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] self.network_api.deallocate_for_instance( [ 1450.748265] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1450.748531] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] data = neutron.list_ports(**search_opts) [ 
1450.748531] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1450.748531] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] ret = obj(*args, **kwargs) [ 1450.748531] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1450.748531] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] return self.list('ports', self.ports_path, retrieve_all, [ 1450.748531] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1450.748531] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] ret = obj(*args, **kwargs) [ 1450.748531] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1450.748531] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] for r in self._pagination(collection, path, **params): [ 1450.748531] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1450.748531] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] res = self.get(path, params=params) [ 1450.748531] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1450.748531] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] ret = obj(*args, **kwargs) [ 1450.748880] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1450.748880] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] return self.retry_request("GET", action, body=body, [ 1450.748880] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1450.748880] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] ret = obj(*args, **kwargs) [ 1450.748880] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1450.748880] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] return self.do_request(method, action, body=body, [ 1450.748880] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1450.748880] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] ret = obj(*args, **kwargs) [ 1450.748880] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 
1450.748880] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] self._handle_fault_response(status_code, replybody, resp) [ 1450.748880] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/network/neutron.py", line 204, in wrapper [ 1450.748880] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] raise exception.Unauthorized() [ 1450.748880] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] nova.exception.Unauthorized: Not authorized. [ 1450.749219] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] [ 1450.824946] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1450.828496] env[68443]: INFO nova.scheduler.client.report [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Deleted allocations for instance 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5 [ 1450.846706] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1450.854550] env[68443]: DEBUG oslo_concurrency.lockutils [None req-9cc5542e-25de-4a21-8f96-d0fc13980ee5 tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Lock "1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 638.297s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1450.855692] env[68443]: DEBUG oslo_concurrency.lockutils [None req-ea7ed8a3-584e-4aa0-bfc4-3a211b38028d tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Lock "1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 441.514s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1450.855957] env[68443]: DEBUG oslo_concurrency.lockutils [None req-ea7ed8a3-584e-4aa0-bfc4-3a211b38028d tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Acquiring lock "1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1450.856299] env[68443]: DEBUG oslo_concurrency.lockutils [None req-ea7ed8a3-584e-4aa0-bfc4-3a211b38028d tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Lock "1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
1450.856452] env[68443]: DEBUG oslo_concurrency.lockutils [None req-ea7ed8a3-584e-4aa0-bfc4-3a211b38028d tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Lock "1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1450.859375] env[68443]: INFO nova.compute.manager [None req-ea7ed8a3-584e-4aa0-bfc4-3a211b38028d tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Terminating instance [ 1450.861416] env[68443]: DEBUG nova.compute.manager [None req-ea7ed8a3-584e-4aa0-bfc4-3a211b38028d tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1450.861619] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-ea7ed8a3-584e-4aa0-bfc4-3a211b38028d tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1450.861882] env[68443]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bc3ad229-d9f5-489b-a90d-7d20f6609c66 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.870617] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19b34f33-a883-4304-8196-9963e4632eea {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.884622] env[68443]: DEBUG nova.compute.manager [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1450.907213] env[68443]: WARNING nova.virt.vmwareapi.vmops [None req-ea7ed8a3-584e-4aa0-bfc4-3a211b38028d tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5 could not be found. [ 1450.907448] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-ea7ed8a3-584e-4aa0-bfc4-3a211b38028d tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1450.907631] env[68443]: INFO nova.compute.manager [None req-ea7ed8a3-584e-4aa0-bfc4-3a211b38028d tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Took 0.05 seconds to destroy the instance on the hypervisor. 
[ 1450.907927] env[68443]: DEBUG oslo.service.loopingcall [None req-ea7ed8a3-584e-4aa0-bfc4-3a211b38028d tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1450.910793] env[68443]: DEBUG nova.compute.manager [-] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1450.910858] env[68443]: DEBUG nova.network.neutron [-] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1450.951685] env[68443]: DEBUG oslo_concurrency.lockutils [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1450.999332] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10a3eec2-ce51-4393-a226-fe429213ea0a {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.008746] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e44f827-53b4-4bf9-be6f-fc4281666c4f {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.042632] env[68443]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=68443) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1451.042888] env[68443]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1451.043663] env[68443]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1451.043663] env[68443]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1451.043663] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1451.043663] env[68443]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1451.043663] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1451.043663] env[68443]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1451.043663] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1451.043663] env[68443]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1451.043663] env[68443]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1451.043663] env[68443]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-258efd80-75be-4640-a2f9-0fbbb8385bd4'] [ 1451.043663] env[68443]: ERROR oslo.service.loopingcall [ 1451.043663] env[68443]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1451.043663] env[68443]: ERROR oslo.service.loopingcall [ 1451.043663] env[68443]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1451.043663] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1451.043663] env[68443]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1451.044125] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1451.044125] env[68443]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1451.044125] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1451.044125] env[68443]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1451.044125] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1451.044125] env[68443]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1451.044125] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1451.044125] env[68443]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1451.044125] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1451.044125] env[68443]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1451.044125] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1451.044125] env[68443]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1451.044125] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1451.044125] env[68443]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1451.044125] env[68443]: ERROR 
oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1451.044125] env[68443]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1451.044125] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1451.044125] env[68443]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1451.044574] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1451.044574] env[68443]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1451.044574] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1451.044574] env[68443]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1451.044574] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1451.044574] env[68443]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1451.044574] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1451.044574] env[68443]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1451.044574] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1451.044574] env[68443]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1451.044574] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1451.044574] env[68443]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1451.044574] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1451.044574] env[68443]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1451.044574] env[68443]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1451.044574] env[68443]: ERROR oslo.service.loopingcall [ 1451.044963] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9426653b-b500-43b0-9d34-3e3c575e74f5 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.047608] env[68443]: ERROR nova.compute.manager [None req-ea7ed8a3-584e-4aa0-bfc4-3a211b38028d tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1451.056264] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92b8b9c5-9b68-4cf0-8c53-871cf31780a8 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.071039] env[68443]: DEBUG nova.compute.provider_tree [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1451.080763] env[68443]: ERROR nova.compute.manager [None req-ea7ed8a3-584e-4aa0-bfc4-3a211b38028d tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1451.080763] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Traceback (most recent call last): [ 1451.080763] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1451.080763] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] ret = obj(*args, **kwargs) [ 1451.080763] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1451.080763] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] exception_handler_v20(status_code, error_body) [ 1451.080763] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1451.080763] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] raise client_exc(message=error_message, [ 1451.080763] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1451.080763] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Neutron server returns request_ids: ['req-258efd80-75be-4640-a2f9-0fbbb8385bd4'] [ 1451.081419] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] [ 1451.081419] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] During handling of the above exception, another exception occurred: [ 1451.081419] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] [ 1451.081419] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Traceback (most recent call last): [ 1451.081419] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 1451.081419] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] self._delete_instance(context, instance, bdms) [ 
1451.081419] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 1451.081419] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] self._shutdown_instance(context, instance, bdms) [ 1451.081419] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 1451.081419] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] self._try_deallocate_network(context, instance, requested_networks) [ 1451.081419] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 1451.081419] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] with excutils.save_and_reraise_exception(): [ 1451.081419] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1451.081419] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] self.force_reraise() [ 1451.081984] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1451.081984] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] raise self.value [ 1451.081984] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 1451.081984] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] _deallocate_network_with_retries() [ 1451.081984] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1451.081984] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] return evt.wait() [ 1451.081984] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1451.081984] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] result = hub.switch() [ 1451.081984] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1451.081984] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] return self.greenlet.switch() [ 1451.081984] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1451.081984] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] result = func(*self.args, **self.kw) [ 1451.082512] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in 
_func [ 1451.082512] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] result = f(*args, **kwargs) [ 1451.082512] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1451.082512] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] self._deallocate_network( [ 1451.082512] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1451.082512] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] self.network_api.deallocate_for_instance( [ 1451.082512] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1451.082512] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] data = neutron.list_ports(**search_opts) [ 1451.082512] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1451.082512] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] ret = obj(*args, **kwargs) [ 1451.082512] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1451.082512] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] return self.list('ports', self.ports_path, retrieve_all, [ 1451.082512] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1451.082950] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] ret = obj(*args, **kwargs) [ 1451.082950] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1451.082950] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] for r in self._pagination(collection, path, **params): [ 1451.082950] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1451.082950] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] res = self.get(path, params=params) [ 1451.082950] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1451.082950] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] ret = obj(*args, **kwargs) [ 1451.082950] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1451.082950] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] return self.retry_request("GET", action, body=body, [ 1451.082950] env[68443]: ERROR nova.compute.manager [instance: 
1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1451.082950] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] ret = obj(*args, **kwargs) [ 1451.082950] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1451.082950] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] return self.do_request(method, action, body=body, [ 1451.083476] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1451.083476] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] ret = obj(*args, **kwargs) [ 1451.083476] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1451.083476] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] self._handle_fault_response(status_code, replybody, resp) [ 1451.083476] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1451.083476] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1451.083476] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1451.083476] env[68443]: ERROR nova.compute.manager [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] [ 1451.085144] env[68443]: DEBUG nova.scheduler.client.report [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1451.103571] env[68443]: DEBUG oslo_concurrency.lockutils [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.522s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1451.103571] env[68443]: DEBUG nova.compute.manager [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Start building networks asynchronously for instance. 
{{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1451.104623] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.258s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1451.104820] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.001s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1451.104978] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68443) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1451.105291] env[68443]: DEBUG oslo_concurrency.lockutils [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.154s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1451.106717] env[68443]: INFO nova.compute.claims [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1451.113587] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b4487a3-a1e8-4864-a4fd-f998c537d15f {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.116157] env[68443]: DEBUG oslo_concurrency.lockutils [None req-ea7ed8a3-584e-4aa0-bfc4-3a211b38028d tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Lock "1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.260s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1451.118239] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 175.025s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1451.118339] env[68443]: INFO nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] During sync_power_state the instance has a pending task (deleting). Skip. 
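Before retrying the delete, it is worth confirming outside Nova that the service credentials Neutron rejected above actually work. The standalone check sketched below reproduces the failed list_ports call with keystoneauth1 and python-neutronclient; the option names in the comments mirror the usual [neutron] section of nova.conf, and the URL and credential values are placeholders, not values taken from this deployment.

# Standalone re-run of the list_ports call that returned 401 above.
# All literal values are placeholders; fill them in from the [neutron]
# section of nova.conf on the compute host.
from keystoneauth1 import loading, session
from neutronclient.v2_0 import client as neutron_client

loader = loading.get_plugin_loader('password')
auth = loader.load_from_options(
    auth_url='https://keystone.example.test/v3',   # [neutron]/auth_url
    username='nova',                               # [neutron]/username
    password='REDACTED',                           # [neutron]/password
    project_name='service',                        # [neutron]/project_name
    user_domain_name='Default',
    project_domain_name='Default',
)
sess = session.Session(auth=auth)

neutron = neutron_client.Client(session=sess)
# A 401 here reproduces the failure outside Nova and points at the
# credentials or the Keystone endpoint rather than at the compute code.
print(len(neutron.list_ports().get('ports', [])), "ports visible")

If this check also returns 401, the credentials or the Keystone endpoint are the likely cause; if it succeeds, the cached session inside nova-compute is more suspect and restarting the service to force a fresh token is a reasonable next step.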
[ 1451.118519] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1451.123431] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1530d81f-fcc5-4256-bd08-50926c55c03a {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.142538] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61a656ae-afd7-494a-a99f-07e9dc729461 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.148427] env[68443]: DEBUG nova.compute.utils [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Using /dev/sd instead of None {{(pid=68443) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1451.153975] env[68443]: DEBUG nova.compute.manager [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Allocating IP information in the background. {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1451.153975] env[68443]: DEBUG nova.network.neutron [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] allocate_for_instance() {{(pid=68443) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1451.163140] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-886ba3e2-e75e-4ab5-b1d6-29c32f4384de {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.171759] env[68443]: DEBUG nova.compute.manager [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Start building block device mappings for instance. 
{{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1451.207813] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180988MB free_disk=105GB free_vcpus=48 pci_devices=None {{(pid=68443) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1451.207997] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1451.212224] env[68443]: INFO nova.compute.manager [None req-ea7ed8a3-584e-4aa0-bfc4-3a211b38028d tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] [instance: 1f0d84b8-7f22-4b48-ab5a-04b36bfd29a5] Successfully reverted task state from None on failure for instance. [ 1451.217696] env[68443]: ERROR oslo_messaging.rpc.server [None req-ea7ed8a3-584e-4aa0-bfc4-3a211b38028d tempest-DeleteServersAdminTestJSON-2008367719 tempest-DeleteServersAdminTestJSON-2008367719-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1451.217696] env[68443]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1451.217696] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1451.217696] env[68443]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1451.217696] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1451.217696] env[68443]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1451.217696] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1451.217696] env[68443]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1451.217696] env[68443]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1451.217696] env[68443]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-258efd80-75be-4640-a2f9-0fbbb8385bd4'] [ 1451.217696] env[68443]: ERROR oslo_messaging.rpc.server [ 1451.217696] env[68443]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1451.217696] env[68443]: ERROR oslo_messaging.rpc.server [ 1451.217696] env[68443]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1451.217696] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1451.218430] env[68443]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1451.218430] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1451.218430] env[68443]: ERROR 
oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1451.218430] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1451.218430] env[68443]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1451.218430] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1451.218430] env[68443]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1451.218430] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1451.218430] env[68443]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1451.218430] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1451.218430] env[68443]: ERROR oslo_messaging.rpc.server raise self.value [ 1451.218430] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1451.218430] env[68443]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1451.218430] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1451.218430] env[68443]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1451.218430] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1451.218430] env[68443]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1451.218430] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1451.219184] env[68443]: ERROR oslo_messaging.rpc.server raise self.value [ 1451.219184] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1451.219184] env[68443]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1451.219184] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1451.219184] env[68443]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1451.219184] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1451.219184] env[68443]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1451.219184] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1451.219184] env[68443]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1451.219184] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1451.219184] env[68443]: ERROR oslo_messaging.rpc.server raise self.value [ 1451.219184] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1451.219184] env[68443]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1451.219184] env[68443]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/manager.py", line 3344, in terminate_instance [ 1451.219184] env[68443]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1451.219184] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 1451.219184] env[68443]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1451.219184] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3339, in do_terminate_instance [ 1451.219930] env[68443]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1451.219930] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1451.219930] env[68443]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1451.219930] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1451.219930] env[68443]: ERROR oslo_messaging.rpc.server raise self.value [ 1451.219930] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 1451.219930] env[68443]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1451.219930] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 1451.219930] env[68443]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1451.219930] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 1451.219930] env[68443]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1451.219930] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 1451.219930] env[68443]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1451.219930] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1451.219930] env[68443]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1451.219930] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1451.219930] env[68443]: ERROR oslo_messaging.rpc.server raise self.value [ 1451.219930] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 1451.220705] env[68443]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1451.220705] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1451.220705] env[68443]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1451.220705] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1451.220705] env[68443]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1451.220705] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1451.220705] env[68443]: ERROR 
oslo_messaging.rpc.server return self.greenlet.switch() [ 1451.220705] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1451.220705] env[68443]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1451.220705] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1451.220705] env[68443]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1451.220705] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1451.220705] env[68443]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1451.220705] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1451.220705] env[68443]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1451.220705] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1451.220705] env[68443]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1451.220705] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1451.221418] env[68443]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1451.221418] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1451.221418] env[68443]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1451.221418] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1451.221418] env[68443]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1451.221418] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1451.221418] env[68443]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1451.221418] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1451.221418] env[68443]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1451.221418] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1451.221418] env[68443]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1451.221418] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1451.221418] env[68443]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1451.221418] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1451.221418] env[68443]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1451.221418] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1451.221418] env[68443]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1451.221418] 
env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1451.222179] env[68443]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1451.222179] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1451.222179] env[68443]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1451.222179] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1451.222179] env[68443]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1451.222179] env[68443]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1451.222179] env[68443]: ERROR oslo_messaging.rpc.server [ 1451.257025] env[68443]: DEBUG nova.compute.manager [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Start spawning the instance on the hypervisor. {{(pid=68443) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1451.261078] env[68443]: DEBUG nova.policy [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bd97a934ab8f48e2bf883cc4dddcdde1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3dafe4b3f7d243caa51d39bfc74a4c11', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68443) authorize /opt/stack/nova/nova/policy.py:203}} [ 1451.286024] env[68443]: DEBUG nova.virt.hardware [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-08T18:48:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-08T18:48:12Z,direct_url=,disk_format='vmdk',id=a80a8b97-4d56-4702-9b02-9d115fcd6710,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f4f2a1e220914ec3b281775c224df247',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-08T18:48:13Z,virtual_size=,visibility=), allow threads: False {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1451.286274] env[68443]: DEBUG nova.virt.hardware [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Flavor limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1451.286429] env[68443]: DEBUG nova.virt.hardware [None 
req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Image limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1451.286608] env[68443]: DEBUG nova.virt.hardware [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Flavor pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1451.286750] env[68443]: DEBUG nova.virt.hardware [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Image pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1451.286892] env[68443]: DEBUG nova.virt.hardware [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1451.287437] env[68443]: DEBUG nova.virt.hardware [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1451.287611] env[68443]: DEBUG nova.virt.hardware [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1451.287779] env[68443]: DEBUG nova.virt.hardware [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Got 1 possible topologies {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1451.287943] env[68443]: DEBUG nova.virt.hardware [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1451.288126] env[68443]: DEBUG nova.virt.hardware [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1451.289026] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12578435-42ce-4c43-a15d-a39218a27ef1 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.300444] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c971ef3-d64f-422e-9c1c-8e1f52a5df99 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.501249] env[68443]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c949179a-d518-403d-a591-65bf0b46dcff {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.513760] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23358170-d664-45be-886d-281820c3556d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.552194] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3dcad3c-1112-4493-9804-968c627ea90d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.560884] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6a0cdb7-9f2f-4142-a2ac-6168e86aff37 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.581521] env[68443]: DEBUG nova.compute.provider_tree [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1451.598202] env[68443]: DEBUG nova.scheduler.client.report [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1451.630306] env[68443]: DEBUG oslo_concurrency.lockutils [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.525s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1451.633021] env[68443]: DEBUG nova.compute.manager [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Start building networks asynchronously for instance. 
{{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1451.634031] env[68443]: DEBUG nova.network.neutron [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Successfully created port: a79f6c78-a900-42bc-b0c2-711d8a923fe2 {{(pid=68443) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1451.635891] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.428s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1451.685979] env[68443]: DEBUG nova.compute.utils [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Using /dev/sd instead of None {{(pid=68443) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1451.688419] env[68443]: DEBUG nova.compute.manager [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Allocating IP information in the background. {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1451.688599] env[68443]: DEBUG nova.network.neutron [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] allocate_for_instance() {{(pid=68443) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1451.697626] env[68443]: DEBUG nova.compute.manager [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Start building block device mappings for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1451.766700] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 3842d98e-d971-456c-b287-53c513285acf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1451.766999] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 6e162408-6d3d-42e0-8992-f5843e9e7855 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1451.767248] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance fd0de9a2-7a54-46be-8b6a-3415366e110c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1451.767450] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance bcdc4f46-810d-4ed7-84f1-2db2c318f920 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1451.767651] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 91fd9c10-db96-4366-9548-13b36f94db6b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1451.767851] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 75e3d9b0-4317-4e6e-9f2b-d32134f7223f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1451.768089] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1451.768320] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance a4708485-db53-416e-94be-f9a017eb28c4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1451.768842] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 18bae6a1-3bd0-4749-8795-5b8ccd18193f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1451.768842] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 99b16cd5-beb0-4f71-8011-411b84ddf497 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1451.780745] env[68443]: DEBUG nova.compute.manager [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Start spawning the instance on the hypervisor. 
{{(pid=68443) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1451.784332] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1451.803629] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 04efeff0-d708-4ab6-bd7a-b438bf28c1d7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1451.815750] env[68443]: DEBUG nova.virt.hardware [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-08T18:48:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-08T18:48:12Z,direct_url=,disk_format='vmdk',id=a80a8b97-4d56-4702-9b02-9d115fcd6710,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f4f2a1e220914ec3b281775c224df247',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-08T18:48:13Z,virtual_size=,visibility=), allow threads: False {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1451.815750] env[68443]: DEBUG nova.virt.hardware [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Flavor limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1451.815908] env[68443]: DEBUG nova.virt.hardware [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Image limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1451.815979] env[68443]: DEBUG nova.virt.hardware [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Flavor pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1451.816125] env[68443]: DEBUG nova.virt.hardware [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Image pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1451.816278] env[68443]: DEBUG nova.virt.hardware [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 
tempest-ServerAddressesTestJSON-3972615-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1451.816484] env[68443]: DEBUG nova.virt.hardware [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1451.816640] env[68443]: DEBUG nova.virt.hardware [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1451.816863] env[68443]: DEBUG nova.virt.hardware [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Got 1 possible topologies {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1451.816965] env[68443]: DEBUG nova.virt.hardware [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1451.817157] env[68443]: DEBUG nova.virt.hardware [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1451.818955] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d8845f4-67fb-468a-8d9e-a5e450e07128 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.823271] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 7366efe5-c640-4689-97a1-fba0ac431b12 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1451.830911] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a426c27b-1f11-4e58-95de-c18528ce6364 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.836101] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 0dd99061-79ed-4348-9a31-7980d6ea5db6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1451.848592] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 5943fe10-a829-4142-a4ae-c6035fe5f4e8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1451.861206] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance db9a6b36-6c53-4769-b93e-3c38b95533d6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1451.878238] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 91e6d317-9322-4938-a1da-f88d36499c7b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1451.897371] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 04fc458a-a928-43ef-8fd0-bfc49989d2b1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1451.897371] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1451.897371] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1451.987248] env[68443]: DEBUG nova.policy [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4bcdf5e5b8e443d5b18b16a64a9b70b4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a43b47149a8f4ba2919bf3000f905fff', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68443) authorize /opt/stack/nova/nova/policy.py:203}} [ 1452.196126] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdbce145-13d8-4923-aea4-b40a11c05834 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.204351] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5bce867-f103-4132-a285-a4786163fc20 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.236977] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e734c13a-1258-401f-9c7f-f1a15e93f814 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.245341] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bebf2769-29b6-4145-a622-9d05b607c27a {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.259680] env[68443]: DEBUG nova.compute.provider_tree [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1452.269796] env[68443]: DEBUG nova.scheduler.client.report [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1452.293689] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68443) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1452.293689] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.658s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1452.494590] env[68443]: DEBUG nova.network.neutron [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Successfully created port: 0f0c3a67-5c81-448e-a436-8857f9317f1b {{(pid=68443) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1452.772834] env[68443]: DEBUG nova.compute.manager [req-2ac8bac8-6b41-4790-8790-a968d4cce561 req-4b5959e3-20be-474b-b011-c8bd9f6d52d7 service nova] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Received event network-vif-plugged-a79f6c78-a900-42bc-b0c2-711d8a923fe2 {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1452.773072] env[68443]: DEBUG oslo_concurrency.lockutils [req-2ac8bac8-6b41-4790-8790-a968d4cce561 req-4b5959e3-20be-474b-b011-c8bd9f6d52d7 service nova] Acquiring lock "18bae6a1-3bd0-4749-8795-5b8ccd18193f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1452.773294] env[68443]: DEBUG oslo_concurrency.lockutils [req-2ac8bac8-6b41-4790-8790-a968d4cce561 req-4b5959e3-20be-474b-b011-c8bd9f6d52d7 service nova] Lock "18bae6a1-3bd0-4749-8795-5b8ccd18193f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1452.773493] env[68443]: DEBUG oslo_concurrency.lockutils [req-2ac8bac8-6b41-4790-8790-a968d4cce561 req-4b5959e3-20be-474b-b011-c8bd9f6d52d7 service nova] Lock "18bae6a1-3bd0-4749-8795-5b8ccd18193f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1452.773668] env[68443]: DEBUG nova.compute.manager [req-2ac8bac8-6b41-4790-8790-a968d4cce561 req-4b5959e3-20be-474b-b011-c8bd9f6d52d7 service nova] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] No waiting events found dispatching network-vif-plugged-a79f6c78-a900-42bc-b0c2-711d8a923fe2 {{(pid=68443) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1452.773832] env[68443]: WARNING nova.compute.manager [req-2ac8bac8-6b41-4790-8790-a968d4cce561 req-4b5959e3-20be-474b-b011-c8bd9f6d52d7 service nova] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Received unexpected event network-vif-plugged-a79f6c78-a900-42bc-b0c2-711d8a923fe2 for instance with vm_state building and task_state spawning. 
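The report-client and resource-tracker records above carry the full inventory for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 (VCPU total 48 at allocation_ratio 4.0, MEMORY_MB 196590 with 512 reserved, DISK_GB 400), alongside the "Total usable vcpus: 48, total allocated vcpus: 10" summary. A minimal sketch of the capacity arithmetic implied by those numbers, assuming the usual Placement formula capacity = (total - reserved) * allocation_ratio; the inventory dict below is copied from the log record above, and the helper is illustrative, not Nova's own code:

# Illustrative recomputation of schedulable capacity from the inventory
# reported above; the formula is an assumption about Placement behaviour.
inventory = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
}

def capacity(record):
    # Assumed Placement formula: (total - reserved) * allocation_ratio.
    return int((record["total"] - record["reserved"]) * record["allocation_ratio"])

for resource_class, record in inventory.items():
    print(resource_class, capacity(record))
# Prints: VCPU 192, MEMORY_MB 196078, DISK_GB 400 -- so the m1.nano
# allocations of {'VCPU': 1, 'MEMORY_MB': 128, 'DISK_GB': 1} logged above
# fit comfortably even with 10 vCPUs already allocated.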
[ 1452.971152] env[68443]: DEBUG nova.network.neutron [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Successfully updated port: a79f6c78-a900-42bc-b0c2-711d8a923fe2 {{(pid=68443) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1452.991321] env[68443]: DEBUG oslo_concurrency.lockutils [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Acquiring lock "refresh_cache-18bae6a1-3bd0-4749-8795-5b8ccd18193f" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1452.991488] env[68443]: DEBUG oslo_concurrency.lockutils [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Acquired lock "refresh_cache-18bae6a1-3bd0-4749-8795-5b8ccd18193f" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1452.991637] env[68443]: DEBUG nova.network.neutron [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Building network info cache for instance {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1453.074952] env[68443]: DEBUG nova.network.neutron [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Instance cache missing network info. 
{{(pid=68443) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1453.290450] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1453.290450] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1453.344794] env[68443]: DEBUG nova.network.neutron [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Updating instance_info_cache with network_info: [{"id": "a79f6c78-a900-42bc-b0c2-711d8a923fe2", "address": "fa:16:3e:51:94:b4", "network": {"id": "03d4bfbc-f6ba-4fd1-8bf5-dde287b9760f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1378866790-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3dafe4b3f7d243caa51d39bfc74a4c11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "47ca1ce6-8148-48d5-bcfe-89e39b73914e", "external-id": "nsx-vlan-transportzone-259", "segmentation_id": 259, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa79f6c78-a9", "ovs_interfaceid": "a79f6c78-a900-42bc-b0c2-711d8a923fe2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1453.358276] env[68443]: DEBUG oslo_concurrency.lockutils [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Releasing lock "refresh_cache-18bae6a1-3bd0-4749-8795-5b8ccd18193f" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1453.361320] env[68443]: DEBUG nova.compute.manager [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Instance network_info: |[{"id": "a79f6c78-a900-42bc-b0c2-711d8a923fe2", "address": "fa:16:3e:51:94:b4", "network": {"id": "03d4bfbc-f6ba-4fd1-8bf5-dde287b9760f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1378866790-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3dafe4b3f7d243caa51d39bfc74a4c11", "mtu": 8950, 
"physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "47ca1ce6-8148-48d5-bcfe-89e39b73914e", "external-id": "nsx-vlan-transportzone-259", "segmentation_id": 259, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa79f6c78-a9", "ovs_interfaceid": "a79f6c78-a900-42bc-b0c2-711d8a923fe2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1453.362331] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:51:94:b4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '47ca1ce6-8148-48d5-bcfe-89e39b73914e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a79f6c78-a900-42bc-b0c2-711d8a923fe2', 'vif_model': 'vmxnet3'}] {{(pid=68443) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1453.372431] env[68443]: DEBUG oslo.service.loopingcall [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1453.373139] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Creating VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1453.374518] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5c28f865-b3cf-4903-9c0a-babfe4296344 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.396383] env[68443]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1453.396383] env[68443]: value = "task-3374011" [ 1453.396383] env[68443]: _type = "Task" [ 1453.396383] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1453.404701] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3374011, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.501244] env[68443]: DEBUG nova.network.neutron [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Successfully updated port: 0f0c3a67-5c81-448e-a436-8857f9317f1b {{(pid=68443) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1453.530929] env[68443]: DEBUG oslo_concurrency.lockutils [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Acquiring lock "refresh_cache-99b16cd5-beb0-4f71-8011-411b84ddf497" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1453.531111] env[68443]: DEBUG oslo_concurrency.lockutils [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Acquired lock "refresh_cache-99b16cd5-beb0-4f71-8011-411b84ddf497" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1453.531277] env[68443]: DEBUG nova.network.neutron [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Building network info cache for instance {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1453.574884] env[68443]: DEBUG nova.network.neutron [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Instance cache missing network info. 
{{(pid=68443) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1453.755386] env[68443]: DEBUG nova.network.neutron [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Updating instance_info_cache with network_info: [{"id": "0f0c3a67-5c81-448e-a436-8857f9317f1b", "address": "fa:16:3e:e3:97:f2", "network": {"id": "a4dfe3e0-80af-4e20-a8e3-8fa4f8f6656e", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1546025314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a43b47149a8f4ba2919bf3000f905fff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aec0089a-ff85-4bef-bad8-c84de39af71a", "external-id": "nsx-vlan-transportzone-758", "segmentation_id": 758, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f0c3a67-5c", "ovs_interfaceid": "0f0c3a67-5c81-448e-a436-8857f9317f1b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1453.766388] env[68443]: DEBUG oslo_concurrency.lockutils [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Releasing lock "refresh_cache-99b16cd5-beb0-4f71-8011-411b84ddf497" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1453.766693] env[68443]: DEBUG nova.compute.manager [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Instance network_info: |[{"id": "0f0c3a67-5c81-448e-a436-8857f9317f1b", "address": "fa:16:3e:e3:97:f2", "network": {"id": "a4dfe3e0-80af-4e20-a8e3-8fa4f8f6656e", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1546025314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a43b47149a8f4ba2919bf3000f905fff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aec0089a-ff85-4bef-bad8-c84de39af71a", "external-id": "nsx-vlan-transportzone-758", "segmentation_id": 758, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f0c3a67-5c", "ovs_interfaceid": "0f0c3a67-5c81-448e-a436-8857f9317f1b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68443) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1987}} [ 1453.767148] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e3:97:f2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aec0089a-ff85-4bef-bad8-c84de39af71a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0f0c3a67-5c81-448e-a436-8857f9317f1b', 'vif_model': 'vmxnet3'}] {{(pid=68443) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1453.774942] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Creating folder: Project (a43b47149a8f4ba2919bf3000f905fff). Parent ref: group-v673136. {{(pid=68443) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1453.775527] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c685b151-6b13-4a27-8ed8-cabe399ce665 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.785231] env[68443]: INFO nova.virt.vmwareapi.vm_util [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Created folder: Project (a43b47149a8f4ba2919bf3000f905fff) in parent group-v673136. [ 1453.785413] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Creating folder: Instances. Parent ref: group-v673214. {{(pid=68443) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1453.785634] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-852ef11c-856f-4fd2-86a8-9c1d005809d4 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.793686] env[68443]: INFO nova.virt.vmwareapi.vm_util [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Created folder: Instances in parent group-v673214. [ 1453.793920] env[68443]: DEBUG oslo.service.loopingcall [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1453.794329] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Creating VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1453.794390] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d852f5a8-7889-4f22-a88e-ef434a476916 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.813688] env[68443]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1453.813688] env[68443]: value = "task-3374014" [ 1453.813688] env[68443]: _type = "Task" [ 1453.813688] env[68443]: } to complete. 
{{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1453.821335] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3374014, 'name': CreateVM_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.905936] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3374011, 'name': CreateVM_Task, 'duration_secs': 0.311168} completed successfully. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1453.906197] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Created VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1453.907129] env[68443]: DEBUG oslo_concurrency.lockutils [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1453.907374] env[68443]: DEBUG oslo_concurrency.lockutils [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1453.907753] env[68443]: DEBUG oslo_concurrency.lockutils [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1453.908351] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1673962a-ffb7-476f-87ed-b3ca3736ddf1 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.913143] env[68443]: DEBUG oslo_vmware.api [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Waiting for the task: (returnval){ [ 1453.913143] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]52b02872-ed64-c8a2-755d-394d90d564ad" [ 1453.913143] env[68443]: _type = "Task" [ 1453.913143] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1453.926197] env[68443]: DEBUG oslo_vmware.api [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]52b02872-ed64-c8a2-755d-394d90d564ad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.324246] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3374014, 'name': CreateVM_Task, 'duration_secs': 0.289098} completed successfully. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1454.324494] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Created VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1454.325110] env[68443]: DEBUG oslo_concurrency.lockutils [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1454.423484] env[68443]: DEBUG oslo_concurrency.lockutils [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1454.423757] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Processing image a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1454.423972] env[68443]: DEBUG oslo_concurrency.lockutils [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1454.424203] env[68443]: DEBUG oslo_concurrency.lockutils [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1454.424507] env[68443]: DEBUG oslo_concurrency.lockutils [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1454.424753] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5238aa92-1ccc-4895-860f-2fc7adccead8 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.429113] env[68443]: DEBUG oslo_vmware.api [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Waiting for the task: (returnval){ [ 1454.429113] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]529174ae-6358-e9ca-f77d-260ffe3b93d9" [ 1454.429113] env[68443]: _type = "Task" [ 1454.429113] env[68443]: } to complete. 
{{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1454.435974] env[68443]: DEBUG oslo_vmware.api [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]529174ae-6358-e9ca-f77d-260ffe3b93d9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.799126] env[68443]: DEBUG nova.compute.manager [req-cac9c82c-c799-4e9a-836c-562cd5f72657 req-911ee0b2-0ac9-4ca3-8b82-1d3d0713c8cb service nova] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Received event network-changed-a79f6c78-a900-42bc-b0c2-711d8a923fe2 {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1454.799334] env[68443]: DEBUG nova.compute.manager [req-cac9c82c-c799-4e9a-836c-562cd5f72657 req-911ee0b2-0ac9-4ca3-8b82-1d3d0713c8cb service nova] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Refreshing instance network info cache due to event network-changed-a79f6c78-a900-42bc-b0c2-711d8a923fe2. {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1454.799548] env[68443]: DEBUG oslo_concurrency.lockutils [req-cac9c82c-c799-4e9a-836c-562cd5f72657 req-911ee0b2-0ac9-4ca3-8b82-1d3d0713c8cb service nova] Acquiring lock "refresh_cache-18bae6a1-3bd0-4749-8795-5b8ccd18193f" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1454.799691] env[68443]: DEBUG oslo_concurrency.lockutils [req-cac9c82c-c799-4e9a-836c-562cd5f72657 req-911ee0b2-0ac9-4ca3-8b82-1d3d0713c8cb service nova] Acquired lock "refresh_cache-18bae6a1-3bd0-4749-8795-5b8ccd18193f" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1454.799852] env[68443]: DEBUG nova.network.neutron [req-cac9c82c-c799-4e9a-836c-562cd5f72657 req-911ee0b2-0ac9-4ca3-8b82-1d3d0713c8cb service nova] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Refreshing network info cache for port a79f6c78-a900-42bc-b0c2-711d8a923fe2 {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1454.941981] env[68443]: DEBUG oslo_concurrency.lockutils [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1454.942283] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Processing image a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1454.942581] env[68443]: DEBUG oslo_concurrency.lockutils [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1455.063506] 
env[68443]: DEBUG nova.network.neutron [req-cac9c82c-c799-4e9a-836c-562cd5f72657 req-911ee0b2-0ac9-4ca3-8b82-1d3d0713c8cb service nova] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Updated VIF entry in instance network info cache for port a79f6c78-a900-42bc-b0c2-711d8a923fe2. {{(pid=68443) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1455.063874] env[68443]: DEBUG nova.network.neutron [req-cac9c82c-c799-4e9a-836c-562cd5f72657 req-911ee0b2-0ac9-4ca3-8b82-1d3d0713c8cb service nova] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Updating instance_info_cache with network_info: [{"id": "a79f6c78-a900-42bc-b0c2-711d8a923fe2", "address": "fa:16:3e:51:94:b4", "network": {"id": "03d4bfbc-f6ba-4fd1-8bf5-dde287b9760f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1378866790-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3dafe4b3f7d243caa51d39bfc74a4c11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "47ca1ce6-8148-48d5-bcfe-89e39b73914e", "external-id": "nsx-vlan-transportzone-259", "segmentation_id": 259, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa79f6c78-a9", "ovs_interfaceid": "a79f6c78-a900-42bc-b0c2-711d8a923fe2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1455.073244] env[68443]: DEBUG oslo_concurrency.lockutils [req-cac9c82c-c799-4e9a-836c-562cd5f72657 req-911ee0b2-0ac9-4ca3-8b82-1d3d0713c8cb service nova] Releasing lock "refresh_cache-18bae6a1-3bd0-4749-8795-5b8ccd18193f" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1455.073511] env[68443]: DEBUG nova.compute.manager [req-cac9c82c-c799-4e9a-836c-562cd5f72657 req-911ee0b2-0ac9-4ca3-8b82-1d3d0713c8cb service nova] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Received event network-vif-plugged-0f0c3a67-5c81-448e-a436-8857f9317f1b {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1455.073717] env[68443]: DEBUG oslo_concurrency.lockutils [req-cac9c82c-c799-4e9a-836c-562cd5f72657 req-911ee0b2-0ac9-4ca3-8b82-1d3d0713c8cb service nova] Acquiring lock "99b16cd5-beb0-4f71-8011-411b84ddf497-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1455.073923] env[68443]: DEBUG oslo_concurrency.lockutils [req-cac9c82c-c799-4e9a-836c-562cd5f72657 req-911ee0b2-0ac9-4ca3-8b82-1d3d0713c8cb service nova] Lock "99b16cd5-beb0-4f71-8011-411b84ddf497-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1455.074104] env[68443]: DEBUG oslo_concurrency.lockutils [req-cac9c82c-c799-4e9a-836c-562cd5f72657 req-911ee0b2-0ac9-4ca3-8b82-1d3d0713c8cb service nova] Lock 
"99b16cd5-beb0-4f71-8011-411b84ddf497-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1455.074279] env[68443]: DEBUG nova.compute.manager [req-cac9c82c-c799-4e9a-836c-562cd5f72657 req-911ee0b2-0ac9-4ca3-8b82-1d3d0713c8cb service nova] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] No waiting events found dispatching network-vif-plugged-0f0c3a67-5c81-448e-a436-8857f9317f1b {{(pid=68443) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1455.074446] env[68443]: WARNING nova.compute.manager [req-cac9c82c-c799-4e9a-836c-562cd5f72657 req-911ee0b2-0ac9-4ca3-8b82-1d3d0713c8cb service nova] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Received unexpected event network-vif-plugged-0f0c3a67-5c81-448e-a436-8857f9317f1b for instance with vm_state building and task_state spawning. [ 1455.074610] env[68443]: DEBUG nova.compute.manager [req-cac9c82c-c799-4e9a-836c-562cd5f72657 req-911ee0b2-0ac9-4ca3-8b82-1d3d0713c8cb service nova] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Received event network-changed-0f0c3a67-5c81-448e-a436-8857f9317f1b {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1455.074761] env[68443]: DEBUG nova.compute.manager [req-cac9c82c-c799-4e9a-836c-562cd5f72657 req-911ee0b2-0ac9-4ca3-8b82-1d3d0713c8cb service nova] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Refreshing instance network info cache due to event network-changed-0f0c3a67-5c81-448e-a436-8857f9317f1b. {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1455.074940] env[68443]: DEBUG oslo_concurrency.lockutils [req-cac9c82c-c799-4e9a-836c-562cd5f72657 req-911ee0b2-0ac9-4ca3-8b82-1d3d0713c8cb service nova] Acquiring lock "refresh_cache-99b16cd5-beb0-4f71-8011-411b84ddf497" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1455.075091] env[68443]: DEBUG oslo_concurrency.lockutils [req-cac9c82c-c799-4e9a-836c-562cd5f72657 req-911ee0b2-0ac9-4ca3-8b82-1d3d0713c8cb service nova] Acquired lock "refresh_cache-99b16cd5-beb0-4f71-8011-411b84ddf497" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1455.075265] env[68443]: DEBUG nova.network.neutron [req-cac9c82c-c799-4e9a-836c-562cd5f72657 req-911ee0b2-0ac9-4ca3-8b82-1d3d0713c8cb service nova] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Refreshing network info cache for port 0f0c3a67-5c81-448e-a436-8857f9317f1b {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1455.517816] env[68443]: DEBUG nova.network.neutron [req-cac9c82c-c799-4e9a-836c-562cd5f72657 req-911ee0b2-0ac9-4ca3-8b82-1d3d0713c8cb service nova] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Updated VIF entry in instance network info cache for port 0f0c3a67-5c81-448e-a436-8857f9317f1b. 
{{(pid=68443) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1455.518210] env[68443]: DEBUG nova.network.neutron [req-cac9c82c-c799-4e9a-836c-562cd5f72657 req-911ee0b2-0ac9-4ca3-8b82-1d3d0713c8cb service nova] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Updating instance_info_cache with network_info: [{"id": "0f0c3a67-5c81-448e-a436-8857f9317f1b", "address": "fa:16:3e:e3:97:f2", "network": {"id": "a4dfe3e0-80af-4e20-a8e3-8fa4f8f6656e", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1546025314-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a43b47149a8f4ba2919bf3000f905fff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aec0089a-ff85-4bef-bad8-c84de39af71a", "external-id": "nsx-vlan-transportzone-758", "segmentation_id": 758, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f0c3a67-5c", "ovs_interfaceid": "0f0c3a67-5c81-448e-a436-8857f9317f1b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1455.529354] env[68443]: DEBUG oslo_concurrency.lockutils [req-cac9c82c-c799-4e9a-836c-562cd5f72657 req-911ee0b2-0ac9-4ca3-8b82-1d3d0713c8cb service nova] Releasing lock "refresh_cache-99b16cd5-beb0-4f71-8011-411b84ddf497" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1458.343549] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Acquiring lock "2985403d-348f-473d-ad1f-75fb67d3be12" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1458.343821] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Lock "2985403d-348f-473d-ad1f-75fb67d3be12" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1460.107951] env[68443]: DEBUG oslo_concurrency.lockutils [None req-8dd826de-cfd2-4e21-95ad-cfd3609251aa tempest-ListImageFiltersTestJSON-1717746758 tempest-ListImageFiltersTestJSON-1717746758-project-member] Acquiring lock "91194294-6f8a-4067-a7ed-610c9da3aec8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1460.108244] env[68443]: DEBUG oslo_concurrency.lockutils [None req-8dd826de-cfd2-4e21-95ad-cfd3609251aa tempest-ListImageFiltersTestJSON-1717746758 tempest-ListImageFiltersTestJSON-1717746758-project-member] Lock 
"91194294-6f8a-4067-a7ed-610c9da3aec8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1460.354478] env[68443]: DEBUG oslo_concurrency.lockutils [None req-17f756af-e0ad-4f6d-902b-98b8dd62dd25 tempest-ListImageFiltersTestJSON-1717746758 tempest-ListImageFiltersTestJSON-1717746758-project-member] Acquiring lock "409e47f5-dea2-43a4-9ab6-475dc09fafb4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1460.354723] env[68443]: DEBUG oslo_concurrency.lockutils [None req-17f756af-e0ad-4f6d-902b-98b8dd62dd25 tempest-ListImageFiltersTestJSON-1717746758 tempest-ListImageFiltersTestJSON-1717746758-project-member] Lock "409e47f5-dea2-43a4-9ab6-475dc09fafb4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1499.041588] env[68443]: WARNING oslo_vmware.rw_handles [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1499.041588] env[68443]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1499.041588] env[68443]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1499.041588] env[68443]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1499.041588] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1499.041588] env[68443]: ERROR oslo_vmware.rw_handles response.begin() [ 1499.041588] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1499.041588] env[68443]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1499.041588] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1499.041588] env[68443]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1499.041588] env[68443]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1499.041588] env[68443]: ERROR oslo_vmware.rw_handles [ 1499.042249] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 3842d98e-d971-456c-b287-53c513285acf] Downloaded image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to vmware_temp/5084bae1-821e-40c8-b911-802e9b8483fe/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1499.043908] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 3842d98e-d971-456c-b287-53c513285acf] Caching image {{(pid=68443) 
_fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1499.044164] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Copying Virtual Disk [datastore1] vmware_temp/5084bae1-821e-40c8-b911-802e9b8483fe/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk to [datastore1] vmware_temp/5084bae1-821e-40c8-b911-802e9b8483fe/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk {{(pid=68443) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1499.044461] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-63a57e00-a465-4c34-8c40-44ca2301e65c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.053044] env[68443]: DEBUG oslo_vmware.api [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Waiting for the task: (returnval){ [ 1499.053044] env[68443]: value = "task-3374015" [ 1499.053044] env[68443]: _type = "Task" [ 1499.053044] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.060644] env[68443]: DEBUG oslo_vmware.api [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Task: {'id': task-3374015, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.563727] env[68443]: DEBUG oslo_vmware.exceptions [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Fault InvalidArgument not matched. 
{{(pid=68443) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1499.564026] env[68443]: DEBUG oslo_concurrency.lockutils [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1499.564567] env[68443]: ERROR nova.compute.manager [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 3842d98e-d971-456c-b287-53c513285acf] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1499.564567] env[68443]: Faults: ['InvalidArgument'] [ 1499.564567] env[68443]: ERROR nova.compute.manager [instance: 3842d98e-d971-456c-b287-53c513285acf] Traceback (most recent call last): [ 1499.564567] env[68443]: ERROR nova.compute.manager [instance: 3842d98e-d971-456c-b287-53c513285acf] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1499.564567] env[68443]: ERROR nova.compute.manager [instance: 3842d98e-d971-456c-b287-53c513285acf] yield resources [ 1499.564567] env[68443]: ERROR nova.compute.manager [instance: 3842d98e-d971-456c-b287-53c513285acf] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1499.564567] env[68443]: ERROR nova.compute.manager [instance: 3842d98e-d971-456c-b287-53c513285acf] self.driver.spawn(context, instance, image_meta, [ 1499.564567] env[68443]: ERROR nova.compute.manager [instance: 3842d98e-d971-456c-b287-53c513285acf] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1499.564567] env[68443]: ERROR nova.compute.manager [instance: 3842d98e-d971-456c-b287-53c513285acf] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1499.564567] env[68443]: ERROR nova.compute.manager [instance: 3842d98e-d971-456c-b287-53c513285acf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1499.564567] env[68443]: ERROR nova.compute.manager [instance: 3842d98e-d971-456c-b287-53c513285acf] self._fetch_image_if_missing(context, vi) [ 1499.564567] env[68443]: ERROR nova.compute.manager [instance: 3842d98e-d971-456c-b287-53c513285acf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1499.564883] env[68443]: ERROR nova.compute.manager [instance: 3842d98e-d971-456c-b287-53c513285acf] image_cache(vi, tmp_image_ds_loc) [ 1499.564883] env[68443]: ERROR nova.compute.manager [instance: 3842d98e-d971-456c-b287-53c513285acf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1499.564883] env[68443]: ERROR nova.compute.manager [instance: 3842d98e-d971-456c-b287-53c513285acf] vm_util.copy_virtual_disk( [ 1499.564883] env[68443]: ERROR nova.compute.manager [instance: 3842d98e-d971-456c-b287-53c513285acf] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1499.564883] env[68443]: ERROR nova.compute.manager [instance: 3842d98e-d971-456c-b287-53c513285acf] session._wait_for_task(vmdk_copy_task) [ 1499.564883] env[68443]: ERROR nova.compute.manager [instance: 3842d98e-d971-456c-b287-53c513285acf] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 1499.564883] env[68443]: ERROR nova.compute.manager [instance: 3842d98e-d971-456c-b287-53c513285acf] return self.wait_for_task(task_ref) [ 1499.564883] env[68443]: ERROR nova.compute.manager [instance: 3842d98e-d971-456c-b287-53c513285acf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1499.564883] env[68443]: ERROR nova.compute.manager [instance: 3842d98e-d971-456c-b287-53c513285acf] return evt.wait() [ 1499.564883] env[68443]: ERROR nova.compute.manager [instance: 3842d98e-d971-456c-b287-53c513285acf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1499.564883] env[68443]: ERROR nova.compute.manager [instance: 3842d98e-d971-456c-b287-53c513285acf] result = hub.switch() [ 1499.564883] env[68443]: ERROR nova.compute.manager [instance: 3842d98e-d971-456c-b287-53c513285acf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1499.564883] env[68443]: ERROR nova.compute.manager [instance: 3842d98e-d971-456c-b287-53c513285acf] return self.greenlet.switch() [ 1499.565234] env[68443]: ERROR nova.compute.manager [instance: 3842d98e-d971-456c-b287-53c513285acf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1499.565234] env[68443]: ERROR nova.compute.manager [instance: 3842d98e-d971-456c-b287-53c513285acf] self.f(*self.args, **self.kw) [ 1499.565234] env[68443]: ERROR nova.compute.manager [instance: 3842d98e-d971-456c-b287-53c513285acf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1499.565234] env[68443]: ERROR nova.compute.manager [instance: 3842d98e-d971-456c-b287-53c513285acf] raise exceptions.translate_fault(task_info.error) [ 1499.565234] env[68443]: ERROR nova.compute.manager [instance: 3842d98e-d971-456c-b287-53c513285acf] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1499.565234] env[68443]: ERROR nova.compute.manager [instance: 3842d98e-d971-456c-b287-53c513285acf] Faults: ['InvalidArgument'] [ 1499.565234] env[68443]: ERROR nova.compute.manager [instance: 3842d98e-d971-456c-b287-53c513285acf] [ 1499.565234] env[68443]: INFO nova.compute.manager [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 3842d98e-d971-456c-b287-53c513285acf] Terminating instance [ 1499.566459] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1499.566668] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1499.566911] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-55a0b0eb-03e8-4493-8b17-7e2cb08214db {{(pid=68443) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.569078] env[68443]: DEBUG nova.compute.manager [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 3842d98e-d971-456c-b287-53c513285acf] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1499.569277] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 3842d98e-d971-456c-b287-53c513285acf] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1499.569986] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-989da685-27ae-48ab-a1f0-056ab8fae947 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.576825] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 3842d98e-d971-456c-b287-53c513285acf] Unregistering the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1499.577065] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d2129f72-8e90-400f-a3d4-7a0c7e0a2c4c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.579185] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1499.579362] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68443) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1499.580299] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-89ff1695-aeb2-4c3f-aaf0-d78c72591ca2 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.584750] env[68443]: DEBUG oslo_vmware.api [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Waiting for the task: (returnval){ [ 1499.584750] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]52bab4a6-1e63-01db-7686-64d47da7b8b5" [ 1499.584750] env[68443]: _type = "Task" [ 1499.584750] env[68443]: } to complete. 
{{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.592677] env[68443]: DEBUG oslo_vmware.api [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]52bab4a6-1e63-01db-7686-64d47da7b8b5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.653226] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 3842d98e-d971-456c-b287-53c513285acf] Unregistered the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1499.653635] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 3842d98e-d971-456c-b287-53c513285acf] Deleting contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1499.653936] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Deleting the datastore file [datastore1] 3842d98e-d971-456c-b287-53c513285acf {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1499.654264] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1dc377a5-c342-44aa-b2f4-5263434f6146 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.660394] env[68443]: DEBUG oslo_vmware.api [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Waiting for the task: (returnval){ [ 1499.660394] env[68443]: value = "task-3374017" [ 1499.660394] env[68443]: _type = "Task" [ 1499.660394] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.668090] env[68443]: DEBUG oslo_vmware.api [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Task: {'id': task-3374017, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.095487] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Preparing fetch location {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1500.095817] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Creating directory with path [datastore1] vmware_temp/ecb00eb7-9456-40e6-97fe-adbda5cb7997/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1500.096141] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9a49dcf2-dbe9-4fd6-b38f-43e8cb0b94d0 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.107901] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Created directory with path [datastore1] vmware_temp/ecb00eb7-9456-40e6-97fe-adbda5cb7997/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1500.108104] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Fetch image to [datastore1] vmware_temp/ecb00eb7-9456-40e6-97fe-adbda5cb7997/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1500.108280] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to [datastore1] vmware_temp/ecb00eb7-9456-40e6-97fe-adbda5cb7997/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1500.108991] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72eabb91-7bdb-4b98-afe9-5948227ce4fd {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.115587] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f749008c-0661-4208-b060-bb97db994449 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.124820] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a89d864f-5601-4efd-b800-c56ce49383a8 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.156339] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-6eb4ed62-b36a-4a19-8e6a-db0f1443ab93 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.164522] env[68443]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-48ddcbfd-1fea-426e-b9ab-efdf64624059 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.171049] env[68443]: DEBUG oslo_vmware.api [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Task: {'id': task-3374017, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.080428} completed successfully. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1500.171302] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Deleted the datastore file {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1500.171482] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 3842d98e-d971-456c-b287-53c513285acf] Deleted contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1500.171655] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 3842d98e-d971-456c-b287-53c513285acf] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1500.171846] env[68443]: INFO nova.compute.manager [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 3842d98e-d971-456c-b287-53c513285acf] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1500.173914] env[68443]: DEBUG nova.compute.claims [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 3842d98e-d971-456c-b287-53c513285acf] Aborting claim: {{(pid=68443) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1500.174104] env[68443]: DEBUG oslo_concurrency.lockutils [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1500.174317] env[68443]: DEBUG oslo_concurrency.lockutils [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1500.185958] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1500.235134] env[68443]: DEBUG oslo_vmware.rw_handles [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ecb00eb7-9456-40e6-97fe-adbda5cb7997/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68443) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1500.295050] env[68443]: DEBUG oslo_vmware.rw_handles [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Completed reading data from the image iterator. {{(pid=68443) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1500.295232] env[68443]: DEBUG oslo_vmware.rw_handles [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ecb00eb7-9456-40e6-97fe-adbda5cb7997/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68443) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1500.494334] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76383a3f-17e6-4b72-bb95-69d3db7adb5c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.502450] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a61900c2-c89b-4e92-9cd5-8002fcf66c6c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.532778] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f2985cf-79af-4fa0-8e1a-06a24c7bd59f {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.540019] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ad6f897-5ea7-4d64-9f73-72f7d908bc71 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.553017] env[68443]: DEBUG nova.compute.provider_tree [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1500.564256] env[68443]: DEBUG nova.scheduler.client.report [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1500.580911] env[68443]: DEBUG oslo_concurrency.lockutils [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.406s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1500.581469] env[68443]: ERROR nova.compute.manager [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 3842d98e-d971-456c-b287-53c513285acf] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1500.581469] env[68443]: Faults: ['InvalidArgument'] [ 1500.581469] env[68443]: ERROR nova.compute.manager [instance: 3842d98e-d971-456c-b287-53c513285acf] Traceback (most recent call last): [ 1500.581469] env[68443]: ERROR nova.compute.manager [instance: 3842d98e-d971-456c-b287-53c513285acf] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1500.581469] env[68443]: ERROR 
nova.compute.manager [instance: 3842d98e-d971-456c-b287-53c513285acf] self.driver.spawn(context, instance, image_meta, [ 1500.581469] env[68443]: ERROR nova.compute.manager [instance: 3842d98e-d971-456c-b287-53c513285acf] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1500.581469] env[68443]: ERROR nova.compute.manager [instance: 3842d98e-d971-456c-b287-53c513285acf] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1500.581469] env[68443]: ERROR nova.compute.manager [instance: 3842d98e-d971-456c-b287-53c513285acf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1500.581469] env[68443]: ERROR nova.compute.manager [instance: 3842d98e-d971-456c-b287-53c513285acf] self._fetch_image_if_missing(context, vi) [ 1500.581469] env[68443]: ERROR nova.compute.manager [instance: 3842d98e-d971-456c-b287-53c513285acf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1500.581469] env[68443]: ERROR nova.compute.manager [instance: 3842d98e-d971-456c-b287-53c513285acf] image_cache(vi, tmp_image_ds_loc) [ 1500.581469] env[68443]: ERROR nova.compute.manager [instance: 3842d98e-d971-456c-b287-53c513285acf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1500.581838] env[68443]: ERROR nova.compute.manager [instance: 3842d98e-d971-456c-b287-53c513285acf] vm_util.copy_virtual_disk( [ 1500.581838] env[68443]: ERROR nova.compute.manager [instance: 3842d98e-d971-456c-b287-53c513285acf] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1500.581838] env[68443]: ERROR nova.compute.manager [instance: 3842d98e-d971-456c-b287-53c513285acf] session._wait_for_task(vmdk_copy_task) [ 1500.581838] env[68443]: ERROR nova.compute.manager [instance: 3842d98e-d971-456c-b287-53c513285acf] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1500.581838] env[68443]: ERROR nova.compute.manager [instance: 3842d98e-d971-456c-b287-53c513285acf] return self.wait_for_task(task_ref) [ 1500.581838] env[68443]: ERROR nova.compute.manager [instance: 3842d98e-d971-456c-b287-53c513285acf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1500.581838] env[68443]: ERROR nova.compute.manager [instance: 3842d98e-d971-456c-b287-53c513285acf] return evt.wait() [ 1500.581838] env[68443]: ERROR nova.compute.manager [instance: 3842d98e-d971-456c-b287-53c513285acf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1500.581838] env[68443]: ERROR nova.compute.manager [instance: 3842d98e-d971-456c-b287-53c513285acf] result = hub.switch() [ 1500.581838] env[68443]: ERROR nova.compute.manager [instance: 3842d98e-d971-456c-b287-53c513285acf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1500.581838] env[68443]: ERROR nova.compute.manager [instance: 3842d98e-d971-456c-b287-53c513285acf] return self.greenlet.switch() [ 1500.581838] env[68443]: ERROR nova.compute.manager [instance: 3842d98e-d971-456c-b287-53c513285acf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1500.581838] env[68443]: ERROR nova.compute.manager [instance: 3842d98e-d971-456c-b287-53c513285acf] self.f(*self.args, **self.kw) [ 1500.582185] env[68443]: ERROR nova.compute.manager [instance: 3842d98e-d971-456c-b287-53c513285acf] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1500.582185] env[68443]: ERROR nova.compute.manager [instance: 3842d98e-d971-456c-b287-53c513285acf] raise exceptions.translate_fault(task_info.error) [ 1500.582185] env[68443]: ERROR nova.compute.manager [instance: 3842d98e-d971-456c-b287-53c513285acf] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1500.582185] env[68443]: ERROR nova.compute.manager [instance: 3842d98e-d971-456c-b287-53c513285acf] Faults: ['InvalidArgument'] [ 1500.582185] env[68443]: ERROR nova.compute.manager [instance: 3842d98e-d971-456c-b287-53c513285acf] [ 1500.582185] env[68443]: DEBUG nova.compute.utils [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 3842d98e-d971-456c-b287-53c513285acf] VimFaultException {{(pid=68443) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1500.586698] env[68443]: DEBUG nova.compute.manager [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 3842d98e-d971-456c-b287-53c513285acf] Build of instance 3842d98e-d971-456c-b287-53c513285acf was re-scheduled: A specified parameter was not correct: fileType [ 1500.586698] env[68443]: Faults: ['InvalidArgument'] {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1500.587095] env[68443]: DEBUG nova.compute.manager [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 3842d98e-d971-456c-b287-53c513285acf] Unplugging VIFs for instance {{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1500.587274] env[68443]: DEBUG nova.compute.manager [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1500.587446] env[68443]: DEBUG nova.compute.manager [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 3842d98e-d971-456c-b287-53c513285acf] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1500.587614] env[68443]: DEBUG nova.network.neutron [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 3842d98e-d971-456c-b287-53c513285acf] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1500.915912] env[68443]: DEBUG nova.network.neutron [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 3842d98e-d971-456c-b287-53c513285acf] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1500.927064] env[68443]: INFO nova.compute.manager [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 3842d98e-d971-456c-b287-53c513285acf] Took 0.34 seconds to deallocate network for instance. [ 1501.018350] env[68443]: INFO nova.scheduler.client.report [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Deleted allocations for instance 3842d98e-d971-456c-b287-53c513285acf [ 1501.041356] env[68443]: DEBUG oslo_concurrency.lockutils [None req-2c24705a-15b9-4346-aa89-b880fafb69b9 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Lock "3842d98e-d971-456c-b287-53c513285acf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 589.062s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1501.042663] env[68443]: DEBUG oslo_concurrency.lockutils [None req-f2a59e34-0895-4732-a279-a53049d00259 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Lock "3842d98e-d971-456c-b287-53c513285acf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 391.717s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1501.042883] env[68443]: DEBUG oslo_concurrency.lockutils [None req-f2a59e34-0895-4732-a279-a53049d00259 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Acquiring lock "3842d98e-d971-456c-b287-53c513285acf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1501.043109] env[68443]: DEBUG oslo_concurrency.lockutils [None req-f2a59e34-0895-4732-a279-a53049d00259 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Lock "3842d98e-d971-456c-b287-53c513285acf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68443) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1501.043278] env[68443]: DEBUG oslo_concurrency.lockutils [None req-f2a59e34-0895-4732-a279-a53049d00259 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Lock "3842d98e-d971-456c-b287-53c513285acf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1501.046836] env[68443]: INFO nova.compute.manager [None req-f2a59e34-0895-4732-a279-a53049d00259 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 3842d98e-d971-456c-b287-53c513285acf] Terminating instance [ 1501.049547] env[68443]: DEBUG nova.compute.manager [None req-f2a59e34-0895-4732-a279-a53049d00259 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 3842d98e-d971-456c-b287-53c513285acf] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1501.049547] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-f2a59e34-0895-4732-a279-a53049d00259 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 3842d98e-d971-456c-b287-53c513285acf] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1501.049547] env[68443]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-771988f4-9bdc-491d-aaf4-c12cb4243c27 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.058899] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8e6f4ec-9cef-4e61-ae6e-53a95afe723d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.070043] env[68443]: DEBUG nova.compute.manager [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1501.090739] env[68443]: WARNING nova.virt.vmwareapi.vmops [None req-f2a59e34-0895-4732-a279-a53049d00259 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 3842d98e-d971-456c-b287-53c513285acf] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3842d98e-d971-456c-b287-53c513285acf could not be found. [ 1501.090951] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-f2a59e34-0895-4732-a279-a53049d00259 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 3842d98e-d971-456c-b287-53c513285acf] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1501.091149] env[68443]: INFO nova.compute.manager [None req-f2a59e34-0895-4732-a279-a53049d00259 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 3842d98e-d971-456c-b287-53c513285acf] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 1501.091404] env[68443]: DEBUG oslo.service.loopingcall [None req-f2a59e34-0895-4732-a279-a53049d00259 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1501.091651] env[68443]: DEBUG nova.compute.manager [-] [instance: 3842d98e-d971-456c-b287-53c513285acf] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1501.091763] env[68443]: DEBUG nova.network.neutron [-] [instance: 3842d98e-d971-456c-b287-53c513285acf] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1501.121083] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1501.121083] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1501.123105] env[68443]: INFO nova.compute.claims [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1501.124525] env[68443]: DEBUG nova.network.neutron [-] [instance: 3842d98e-d971-456c-b287-53c513285acf] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1501.133997] env[68443]: INFO nova.compute.manager [-] [instance: 3842d98e-d971-456c-b287-53c513285acf] Took 0.04 seconds to deallocate network for instance. [ 1501.234865] env[68443]: DEBUG oslo_concurrency.lockutils [None req-f2a59e34-0895-4732-a279-a53049d00259 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Lock "3842d98e-d971-456c-b287-53c513285acf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.192s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1501.236100] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "3842d98e-d971-456c-b287-53c513285acf" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 225.143s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1501.236342] env[68443]: INFO nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 3842d98e-d971-456c-b287-53c513285acf] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1501.236527] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "3842d98e-d971-456c-b287-53c513285acf" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1501.398049] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cf30fc7-2dca-42d6-8f30-a1e7fa7e2bac {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.405645] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a72a2d79-0a99-4ab1-aa36-31b331479f8b {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.435533] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-519ec371-d613-47a3-9574-b4de0e763a67 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.442162] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28390e31-27af-4eeb-9ef4-d5aa88ea21f2 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.455163] env[68443]: DEBUG nova.compute.provider_tree [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1501.463802] env[68443]: DEBUG nova.scheduler.client.report [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1501.476670] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.357s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1501.477126] env[68443]: DEBUG nova.compute.manager [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Start building networks asynchronously for instance. 
{{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1501.564862] env[68443]: DEBUG nova.compute.utils [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Using /dev/sd instead of None {{(pid=68443) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1501.566137] env[68443]: DEBUG nova.compute.manager [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Allocating IP information in the background. {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1501.566312] env[68443]: DEBUG nova.network.neutron [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] allocate_for_instance() {{(pid=68443) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1501.576101] env[68443]: DEBUG nova.compute.manager [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Start building block device mappings for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1501.633859] env[68443]: DEBUG nova.policy [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3c9056fd69304807abfeb2fedc4ae20f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5d0592ea4b3c49698b73391ae2be0ad8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68443) authorize /opt/stack/nova/nova/policy.py:203}} [ 1501.643472] env[68443]: DEBUG nova.compute.manager [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Start spawning the instance on the hypervisor. 
{{(pid=68443) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1501.669990] env[68443]: DEBUG nova.virt.hardware [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-08T18:48:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-08T18:48:12Z,direct_url=,disk_format='vmdk',id=a80a8b97-4d56-4702-9b02-9d115fcd6710,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f4f2a1e220914ec3b281775c224df247',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-08T18:48:13Z,virtual_size=,visibility=), allow threads: False {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1501.670275] env[68443]: DEBUG nova.virt.hardware [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Flavor limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1501.670437] env[68443]: DEBUG nova.virt.hardware [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Image limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1501.670621] env[68443]: DEBUG nova.virt.hardware [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Flavor pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1501.670768] env[68443]: DEBUG nova.virt.hardware [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Image pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1501.670914] env[68443]: DEBUG nova.virt.hardware [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1501.671138] env[68443]: DEBUG nova.virt.hardware [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1501.671301] env[68443]: DEBUG nova.virt.hardware [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1501.671466] 
env[68443]: DEBUG nova.virt.hardware [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Got 1 possible topologies {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1501.671626] env[68443]: DEBUG nova.virt.hardware [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1501.671795] env[68443]: DEBUG nova.virt.hardware [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1501.672732] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af2f978d-8750-4cb3-a5a5-a38818e41e73 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.680486] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-249777ce-a662-4956-a341-33afd69e5e3b {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.942106] env[68443]: DEBUG nova.network.neutron [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Successfully created port: 76f5451d-5caf-4671-9be0-6ee23828a61c {{(pid=68443) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1502.432782] env[68443]: DEBUG oslo_concurrency.lockutils [None req-0f375b30-f150-48f5-a8b0-ec3143751558 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Acquiring lock "18bae6a1-3bd0-4749-8795-5b8ccd18193f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1502.571869] env[68443]: DEBUG nova.network.neutron [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Successfully updated port: 76f5451d-5caf-4671-9be0-6ee23828a61c {{(pid=68443) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1502.584227] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Acquiring lock "refresh_cache-0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1502.584376] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Acquired lock "refresh_cache-0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba" {{(pid=68443) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1502.584524] env[68443]: DEBUG nova.network.neutron [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Building network info cache for instance {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1502.627946] env[68443]: DEBUG nova.network.neutron [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Instance cache missing network info. {{(pid=68443) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1502.783977] env[68443]: DEBUG nova.network.neutron [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Updating instance_info_cache with network_info: [{"id": "76f5451d-5caf-4671-9be0-6ee23828a61c", "address": "fa:16:3e:7e:65:26", "network": {"id": "ef236b73-c299-4b78-ba68-0be63802a04b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1138155627-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d0592ea4b3c49698b73391ae2be0ad8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afae6acd-1873-4228-9d5a-1cd5d4efe3e4", "external-id": "nsx-vlan-transportzone-183", "segmentation_id": 183, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap76f5451d-5c", "ovs_interfaceid": "76f5451d-5caf-4671-9be0-6ee23828a61c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1502.795217] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Releasing lock "refresh_cache-0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1502.795500] env[68443]: DEBUG nova.compute.manager [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Instance network_info: |[{"id": "76f5451d-5caf-4671-9be0-6ee23828a61c", "address": "fa:16:3e:7e:65:26", "network": {"id": "ef236b73-c299-4b78-ba68-0be63802a04b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1138155627-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], 
"routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d0592ea4b3c49698b73391ae2be0ad8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afae6acd-1873-4228-9d5a-1cd5d4efe3e4", "external-id": "nsx-vlan-transportzone-183", "segmentation_id": 183, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap76f5451d-5c", "ovs_interfaceid": "76f5451d-5caf-4671-9be0-6ee23828a61c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1502.795898] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7e:65:26', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'afae6acd-1873-4228-9d5a-1cd5d4efe3e4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '76f5451d-5caf-4671-9be0-6ee23828a61c', 'vif_model': 'vmxnet3'}] {{(pid=68443) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1502.803525] env[68443]: DEBUG oslo.service.loopingcall [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1502.803984] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Creating VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1502.804234] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f5318873-349e-45f7-b6f9-d8e6053aac4f {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.824537] env[68443]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1502.824537] env[68443]: value = "task-3374018" [ 1502.824537] env[68443]: _type = "Task" [ 1502.824537] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1502.832282] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3374018, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.256913] env[68443]: DEBUG nova.compute.manager [req-d50b157e-e460-4962-ac1d-983aef9a7577 req-fb1d4f8f-f793-477b-bbf3-f135204d5659 service nova] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Received event network-vif-plugged-76f5451d-5caf-4671-9be0-6ee23828a61c {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1503.257136] env[68443]: DEBUG oslo_concurrency.lockutils [req-d50b157e-e460-4962-ac1d-983aef9a7577 req-fb1d4f8f-f793-477b-bbf3-f135204d5659 service nova] Acquiring lock "0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1503.257293] env[68443]: DEBUG oslo_concurrency.lockutils [req-d50b157e-e460-4962-ac1d-983aef9a7577 req-fb1d4f8f-f793-477b-bbf3-f135204d5659 service nova] Lock "0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1503.257462] env[68443]: DEBUG oslo_concurrency.lockutils [req-d50b157e-e460-4962-ac1d-983aef9a7577 req-fb1d4f8f-f793-477b-bbf3-f135204d5659 service nova] Lock "0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1503.257624] env[68443]: DEBUG nova.compute.manager [req-d50b157e-e460-4962-ac1d-983aef9a7577 req-fb1d4f8f-f793-477b-bbf3-f135204d5659 service nova] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] No waiting events found dispatching network-vif-plugged-76f5451d-5caf-4671-9be0-6ee23828a61c {{(pid=68443) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1503.257784] env[68443]: WARNING nova.compute.manager [req-d50b157e-e460-4962-ac1d-983aef9a7577 req-fb1d4f8f-f793-477b-bbf3-f135204d5659 service nova] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Received unexpected event network-vif-plugged-76f5451d-5caf-4671-9be0-6ee23828a61c for instance with vm_state building and task_state spawning. [ 1503.257938] env[68443]: DEBUG nova.compute.manager [req-d50b157e-e460-4962-ac1d-983aef9a7577 req-fb1d4f8f-f793-477b-bbf3-f135204d5659 service nova] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Received event network-changed-76f5451d-5caf-4671-9be0-6ee23828a61c {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1503.258103] env[68443]: DEBUG nova.compute.manager [req-d50b157e-e460-4962-ac1d-983aef9a7577 req-fb1d4f8f-f793-477b-bbf3-f135204d5659 service nova] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Refreshing instance network info cache due to event network-changed-76f5451d-5caf-4671-9be0-6ee23828a61c. 
{{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1503.258282] env[68443]: DEBUG oslo_concurrency.lockutils [req-d50b157e-e460-4962-ac1d-983aef9a7577 req-fb1d4f8f-f793-477b-bbf3-f135204d5659 service nova] Acquiring lock "refresh_cache-0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1503.258416] env[68443]: DEBUG oslo_concurrency.lockutils [req-d50b157e-e460-4962-ac1d-983aef9a7577 req-fb1d4f8f-f793-477b-bbf3-f135204d5659 service nova] Acquired lock "refresh_cache-0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1503.258570] env[68443]: DEBUG nova.network.neutron [req-d50b157e-e460-4962-ac1d-983aef9a7577 req-fb1d4f8f-f793-477b-bbf3-f135204d5659 service nova] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Refreshing network info cache for port 76f5451d-5caf-4671-9be0-6ee23828a61c {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1503.334950] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3374018, 'name': CreateVM_Task, 'duration_secs': 0.292688} completed successfully. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1503.335146] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Created VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1503.335802] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1503.335969] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1503.336295] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1503.336573] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22efce1d-2a70-418c-86ec-09afa941b6d2 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.342030] env[68443]: DEBUG oslo_vmware.api [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Waiting for the task: (returnval){ [ 1503.342030] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]525bff94-8d4d-e5d8-fa5d-f99d1be86aae" [ 1503.342030] env[68443]: _type = "Task" [ 1503.342030] env[68443]: } to complete. 
{{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1503.350793] env[68443]: DEBUG oslo_vmware.api [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]525bff94-8d4d-e5d8-fa5d-f99d1be86aae, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.519536] env[68443]: DEBUG nova.network.neutron [req-d50b157e-e460-4962-ac1d-983aef9a7577 req-fb1d4f8f-f793-477b-bbf3-f135204d5659 service nova] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Updated VIF entry in instance network info cache for port 76f5451d-5caf-4671-9be0-6ee23828a61c. {{(pid=68443) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1503.519896] env[68443]: DEBUG nova.network.neutron [req-d50b157e-e460-4962-ac1d-983aef9a7577 req-fb1d4f8f-f793-477b-bbf3-f135204d5659 service nova] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Updating instance_info_cache with network_info: [{"id": "76f5451d-5caf-4671-9be0-6ee23828a61c", "address": "fa:16:3e:7e:65:26", "network": {"id": "ef236b73-c299-4b78-ba68-0be63802a04b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1138155627-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d0592ea4b3c49698b73391ae2be0ad8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afae6acd-1873-4228-9d5a-1cd5d4efe3e4", "external-id": "nsx-vlan-transportzone-183", "segmentation_id": 183, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap76f5451d-5c", "ovs_interfaceid": "76f5451d-5caf-4671-9be0-6ee23828a61c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1503.529770] env[68443]: DEBUG oslo_concurrency.lockutils [req-d50b157e-e460-4962-ac1d-983aef9a7577 req-fb1d4f8f-f793-477b-bbf3-f135204d5659 service nova] Releasing lock "refresh_cache-0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1503.851985] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1503.852258] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Processing image a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) _fetch_image_if_missing 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1503.852520] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1505.825268] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1506.824521] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1508.824825] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1509.825594] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1510.825386] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1510.825386] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Starting heal instance info cache {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1510.825625] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Rebuilding the list of instances to heal {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1510.851617] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1510.851789] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1510.851870] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Skipping network cache update for instance because it is Building. 
{{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1510.852020] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1510.852178] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1510.852285] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1510.852408] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: a4708485-db53-416e-94be-f9a017eb28c4] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1510.852528] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1510.852648] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1510.852810] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1510.852943] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Didn't find any instances for network info cache update. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1510.853533] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1510.853683] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] CONF.reclaim_instance_interval <= 0, skipping... 
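The records above are the compute manager's periodic-task heartbeat: _heal_instance_info_cache walks the instance list and skips every instance still in the Building state, then _reclaim_queued_deletes returns early because CONF.reclaim_instance_interval <= 0. As a hedged illustration of how such tasks are registered and dispatched with oslo.service (this is not Nova's ComputeManager code; the class name, spacings, and the locally registered config option are placeholders), a minimal sketch:

from oslo_config import cfg
from oslo_service import periodic_task

CONF = cfg.CONF
# Registered here only so the sketch is self-contained; Nova defines the real option.
CONF.register_opts([cfg.IntOpt('reclaim_instance_interval', default=0)])


class ExamplePeriodicTasks(periodic_task.PeriodicTasks):
    """Hypothetical manager mirroring the 'Running periodic task' records above."""

    def __init__(self):
        super().__init__(CONF)

    @periodic_task.periodic_task(spacing=60, run_immediately=True)
    def _poll_volume_usage(self, context):
        # Placeholder body; the real task polls volume usage statistics.
        pass

    @periodic_task.periodic_task(spacing=300)
    def _reclaim_queued_deletes(self, context):
        # Mirrors the log: bail out when the reclaim interval is disabled.
        if CONF.reclaim_instance_interval <= 0:
            return


tasks = ExamplePeriodicTasks()
# A service normally drives this from a timer loop; called directly, only tasks
# marked run_immediately=True execute on the first pass.
tasks.run_periodic_tasks(context=None)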
{{(pid=68443) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1512.825398] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1512.843259] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1512.843509] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1512.843691] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1512.843867] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68443) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1512.845028] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd86f95f-0c89-4c7f-9fc2-a3f3a7491f5e {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.854324] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d757e5c0-b1bd-4b55-aa07-ab3b60332405 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.869349] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a339729-696a-4c2c-9dbc-dc717ed9e92e {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.875817] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbc7b892-3f7f-4831-8207-22ccf38f45e9 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.905527] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181006MB free_disk=105GB free_vcpus=48 pci_devices=None {{(pid=68443) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1512.905673] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1512.905864] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1512.979991] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 6e162408-6d3d-42e0-8992-f5843e9e7855 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1512.980177] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance fd0de9a2-7a54-46be-8b6a-3415366e110c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1512.980304] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance bcdc4f46-810d-4ed7-84f1-2db2c318f920 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1512.980425] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 91fd9c10-db96-4366-9548-13b36f94db6b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1512.980541] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 75e3d9b0-4317-4e6e-9f2b-d32134f7223f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1512.980670] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1512.980878] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance a4708485-db53-416e-94be-f9a017eb28c4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1512.981014] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 18bae6a1-3bd0-4749-8795-5b8ccd18193f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1512.981135] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 99b16cd5-beb0-4f71-8011-411b84ddf497 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1512.981248] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1512.992871] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 04efeff0-d708-4ab6-bd7a-b438bf28c1d7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1513.004243] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 7366efe5-c640-4689-97a1-fba0ac431b12 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1513.013743] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 0dd99061-79ed-4348-9a31-7980d6ea5db6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1513.023195] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 5943fe10-a829-4142-a4ae-c6035fe5f4e8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1513.032249] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance db9a6b36-6c53-4769-b93e-3c38b95533d6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1513.040963] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 91e6d317-9322-4938-a1da-f88d36499c7b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1513.049923] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 04fc458a-a928-43ef-8fd0-bfc49989d2b1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1513.058551] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 2985403d-348f-473d-ad1f-75fb67d3be12 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1513.066762] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 91194294-6f8a-4067-a7ed-610c9da3aec8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1513.075247] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 409e47f5-dea2-43a4-9ab6-475dc09fafb4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
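The audit above ends with ten actively managed instances, each holding a placement allocation of {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}, while instances that are only scheduled are skipped. The totals reported in the next record (used_ram=1792MB, used_disk=10GB, used_vcpus=10, against the inventory of 48 vCPUs at a 4.0 allocation ratio and 512MB reserved RAM) can be re-derived with the small worked check below; this is illustrative arithmetic only, not resource-tracker code.

# Re-deriving the figures the resource tracker reports in the surrounding records.
per_instance = {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}
active_instances = 10                  # "actively managed" entries above
reserved_ram_mb = 512                  # from the reported inventory

used_vcpus = active_instances * per_instance['VCPU']                        # -> 10
used_disk_gb = active_instances * per_instance['DISK_GB']                   # -> 10
used_ram_mb = reserved_ram_mb + active_instances * per_instance['MEMORY_MB']  # -> 1792

# Capacity as placement computes it: (total - reserved) * allocation_ratio.
vcpu_capacity = (48 - 0) * 4.0          # -> 192 allocatable vCPUs
ram_capacity_mb = (196590 - 512) * 1.0  # -> 196078 MB
disk_capacity_gb = (400 - 0) * 1.0      # -> 400 GB

print(used_vcpus, used_disk_gb, used_ram_mb)
print(vcpu_capacity, ram_capacity_mb, disk_capacity_gb)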
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1513.075477] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1513.075619] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1513.289996] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aceac5a-03bb-4e2e-af36-9b5f2e2db87c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.297341] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16438610-497e-4345-87bd-36dd7907469a {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.328971] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-307aac35-4eda-456e-bbd5-ae5ea1088915 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.336295] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-926d9ecb-f370-4de5-877e-e3065f7d15c9 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.349862] env[68443]: DEBUG nova.compute.provider_tree [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1513.359099] env[68443]: DEBUG nova.scheduler.client.report [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1513.373358] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68443) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1513.373547] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.468s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1514.369441] env[68443]: DEBUG oslo_service.periodic_task [None 
req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1514.369765] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1515.820195] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1534.076093] env[68443]: DEBUG oslo_concurrency.lockutils [None req-9802ce55-da7d-4183-95c6-61f46bb80e98 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Acquiring lock "99b16cd5-beb0-4f71-8011-411b84ddf497" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1535.370866] env[68443]: DEBUG oslo_concurrency.lockutils [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Acquiring lock "0a9bb99d-8f94-4f26-990e-a57aac09c328" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1535.371197] env[68443]: DEBUG oslo_concurrency.lockutils [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Lock "0a9bb99d-8f94-4f26-990e-a57aac09c328" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1536.229848] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4349e706-87bf-4150-a6e7-4d993a7a57d2 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Acquiring lock "0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1544.000703] env[68443]: DEBUG oslo_concurrency.lockutils [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Acquiring lock "b8c2916e-3b70-42c9-9f85-ee8582c636b8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1544.000983] env[68443]: DEBUG oslo_concurrency.lockutils [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Lock "b8c2916e-3b70-42c9-9f85-ee8582c636b8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1548.333790] env[68443]: WARNING oslo_vmware.rw_handles [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1548.333790] env[68443]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1548.333790] env[68443]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1548.333790] env[68443]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1548.333790] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1548.333790] env[68443]: ERROR oslo_vmware.rw_handles response.begin() [ 1548.333790] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1548.333790] env[68443]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1548.333790] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1548.333790] env[68443]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1548.333790] env[68443]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1548.333790] env[68443]: ERROR oslo_vmware.rw_handles [ 1548.334449] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Downloaded image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to vmware_temp/ecb00eb7-9456-40e6-97fe-adbda5cb7997/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1548.336336] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Caching image {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1548.336701] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Copying Virtual Disk [datastore1] vmware_temp/ecb00eb7-9456-40e6-97fe-adbda5cb7997/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk to [datastore1] vmware_temp/ecb00eb7-9456-40e6-97fe-adbda5cb7997/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk {{(pid=68443) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1548.336860] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8f6ae12c-11ff-4576-a416-9fc23b57d0ea {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.344335] env[68443]: DEBUG oslo_vmware.api [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 
tempest-ServerDiskConfigTestJSON-1343212998-project-member] Waiting for the task: (returnval){ [ 1548.344335] env[68443]: value = "task-3374019" [ 1548.344335] env[68443]: _type = "Task" [ 1548.344335] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1548.353673] env[68443]: DEBUG oslo_vmware.api [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Task: {'id': task-3374019, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.854442] env[68443]: DEBUG oslo_vmware.exceptions [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Fault InvalidArgument not matched. {{(pid=68443) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1548.854722] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1548.855307] env[68443]: ERROR nova.compute.manager [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1548.855307] env[68443]: Faults: ['InvalidArgument'] [ 1548.855307] env[68443]: ERROR nova.compute.manager [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Traceback (most recent call last): [ 1548.855307] env[68443]: ERROR nova.compute.manager [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1548.855307] env[68443]: ERROR nova.compute.manager [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] yield resources [ 1548.855307] env[68443]: ERROR nova.compute.manager [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1548.855307] env[68443]: ERROR nova.compute.manager [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] self.driver.spawn(context, instance, image_meta, [ 1548.855307] env[68443]: ERROR nova.compute.manager [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1548.855307] env[68443]: ERROR nova.compute.manager [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1548.855307] env[68443]: ERROR nova.compute.manager [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1548.855307] env[68443]: ERROR nova.compute.manager [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] self._fetch_image_if_missing(context, vi) [ 1548.855307] env[68443]: ERROR nova.compute.manager [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in 
_fetch_image_if_missing [ 1548.855701] env[68443]: ERROR nova.compute.manager [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] image_cache(vi, tmp_image_ds_loc) [ 1548.855701] env[68443]: ERROR nova.compute.manager [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1548.855701] env[68443]: ERROR nova.compute.manager [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] vm_util.copy_virtual_disk( [ 1548.855701] env[68443]: ERROR nova.compute.manager [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1548.855701] env[68443]: ERROR nova.compute.manager [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] session._wait_for_task(vmdk_copy_task) [ 1548.855701] env[68443]: ERROR nova.compute.manager [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1548.855701] env[68443]: ERROR nova.compute.manager [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] return self.wait_for_task(task_ref) [ 1548.855701] env[68443]: ERROR nova.compute.manager [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1548.855701] env[68443]: ERROR nova.compute.manager [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] return evt.wait() [ 1548.855701] env[68443]: ERROR nova.compute.manager [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1548.855701] env[68443]: ERROR nova.compute.manager [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] result = hub.switch() [ 1548.855701] env[68443]: ERROR nova.compute.manager [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1548.855701] env[68443]: ERROR nova.compute.manager [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] return self.greenlet.switch() [ 1548.856083] env[68443]: ERROR nova.compute.manager [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1548.856083] env[68443]: ERROR nova.compute.manager [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] self.f(*self.args, **self.kw) [ 1548.856083] env[68443]: ERROR nova.compute.manager [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1548.856083] env[68443]: ERROR nova.compute.manager [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] raise exceptions.translate_fault(task_info.error) [ 1548.856083] env[68443]: ERROR nova.compute.manager [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1548.856083] env[68443]: ERROR nova.compute.manager [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Faults: ['InvalidArgument'] [ 1548.856083] env[68443]: ERROR nova.compute.manager [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] [ 1548.856083] env[68443]: INFO nova.compute.manager [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Terminating instance [ 1548.857180] env[68443]: 
DEBUG oslo_concurrency.lockutils [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1548.857390] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1548.858057] env[68443]: DEBUG nova.compute.manager [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1548.858252] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1548.858475] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2b1f0d97-3385-4efd-b72a-8d683d07fb54 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.860886] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-527bd9b7-0c37-4c72-9bdf-1998a772f4bf {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.867461] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Unregistering the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1548.867668] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-86337b84-e98e-431d-8dbe-2fef06fbdc39 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.869773] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1548.869946] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68443) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1548.870872] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-93d2f16a-e710-4efd-800d-58dee475dc60 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.875609] env[68443]: DEBUG oslo_vmware.api [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Waiting for the task: (returnval){ [ 1548.875609] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]525eb5db-e1e3-33c2-c292-6aa5b0ceb7c7" [ 1548.875609] env[68443]: _type = "Task" [ 1548.875609] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1548.882480] env[68443]: DEBUG oslo_vmware.api [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]525eb5db-e1e3-33c2-c292-6aa5b0ceb7c7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.941950] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Unregistered the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1548.942192] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Deleting contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1548.942372] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Deleting the datastore file [datastore1] 6e162408-6d3d-42e0-8992-f5843e9e7855 {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1548.942627] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e275c784-e33d-4486-a0c5-7b0605b54c00 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.949625] env[68443]: DEBUG oslo_vmware.api [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Waiting for the task: (returnval){ [ 1548.949625] env[68443]: value = "task-3374021" [ 1548.949625] env[68443]: _type = "Task" [ 1548.949625] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1548.956917] env[68443]: DEBUG oslo_vmware.api [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Task: {'id': task-3374021, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.386123] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Preparing fetch location {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1549.386424] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Creating directory with path [datastore1] vmware_temp/56b60719-e4b9-45e8-a655-dc2e6149d860/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1549.386628] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-67d779f4-ab27-4b9b-8c49-cba42ffbaea0 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.398476] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Created directory with path [datastore1] vmware_temp/56b60719-e4b9-45e8-a655-dc2e6149d860/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1549.398681] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Fetch image to [datastore1] vmware_temp/56b60719-e4b9-45e8-a655-dc2e6149d860/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1549.398852] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to [datastore1] vmware_temp/56b60719-e4b9-45e8-a655-dc2e6149d860/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1549.399699] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-650d1f22-ea4e-4ec2-88db-027d6086ff42 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.406471] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1ba89ae-fccd-4cd1-891b-b0b7caea2deb {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.415439] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ce4b8e9-faae-4b25-b0d3-e24de5c8bf5f {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.445975] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-bd9e544a-e473-4ab2-8d0a-f40d80646724 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.456235] env[68443]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-8a833a58-e9db-4d3b-a9b1-3a75c31ec90b {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.461827] env[68443]: DEBUG oslo_vmware.api [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Task: {'id': task-3374021, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.074061} completed successfully. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1549.462649] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Deleted the datastore file {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1549.465279] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Deleted contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1549.465279] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1549.465279] env[68443]: INFO nova.compute.manager [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Took 0.61 seconds to destroy the instance on the hypervisor. 
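Immediately above, the failed spawn is unwound: the VM is unregistered, FileManager.DeleteDatastoreFile_Task is issued for the instance directory, the task is polled until it completes, and the instance is destroyed. A minimal, hedged sketch of that invoke-and-wait pattern with oslo.vmware follows; the vCenter host, credentials, datastore path, and datacenter reference are placeholders rather than values from this run, and the fault handling mirrors the InvalidArgument fault seen earlier in the log.

from oslo_vmware import api as vmware_api
from oslo_vmware import exceptions as vmware_exc

# Placeholder connection details; none of these come from this log.
session = vmware_api.VMwareAPISession(
    'vc.example.test', server_username='user', server_password='secret',
    api_retry_count=10, task_poll_interval=0.5)

file_manager = session.vim.service_content.fileManager
# Placeholder: the driver normally resolves the datacenter managed object
# reference via the PropertyCollector before making this call.
datacenter_ref = None

task = session.invoke_api(
    session.vim, 'DeleteDatastoreFile_Task', file_manager,
    name='[datastore1] vmware_temp/example-dir',  # placeholder datastore path
    datacenter=datacenter_ref)
try:
    # Same poll loop the _poll_task records above come from; a failed task is
    # translated into a VimFaultException carrying the vSphere fault names.
    session.wait_for_task(task)
except vmware_exc.VimFaultException as err:
    print('datastore delete failed, faults:', err.fault_list)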
[ 1549.467852] env[68443]: DEBUG nova.compute.claims [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Aborting claim: {{(pid=68443) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1549.468032] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1549.468253] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1549.485171] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1549.542855] env[68443]: DEBUG oslo_vmware.rw_handles [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/56b60719-e4b9-45e8-a655-dc2e6149d860/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68443) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1549.602224] env[68443]: DEBUG oslo_vmware.rw_handles [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Completed reading data from the image iterator. {{(pid=68443) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1549.602418] env[68443]: DEBUG oslo_vmware.rw_handles [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/56b60719-e4b9-45e8-a655-dc2e6149d860/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68443) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1549.830159] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b87d6c4-0a94-4965-ae20-825b6b8dfb8f {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.838663] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ccdbc62-142e-47a2-9197-2ebf06a550da {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.868734] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a43064e-d95c-4501-87ad-57c0f13e241c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.875597] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c393da8-b9ad-4714-a87f-d78b6faa2c93 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.888056] env[68443]: DEBUG nova.compute.provider_tree [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1549.898316] env[68443]: DEBUG nova.scheduler.client.report [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1549.912854] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.445s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1549.913384] env[68443]: ERROR nova.compute.manager [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1549.913384] env[68443]: Faults: ['InvalidArgument'] [ 1549.913384] env[68443]: ERROR nova.compute.manager [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Traceback (most recent call last): [ 1549.913384] env[68443]: ERROR nova.compute.manager [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1549.913384] 
env[68443]: ERROR nova.compute.manager [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] self.driver.spawn(context, instance, image_meta, [ 1549.913384] env[68443]: ERROR nova.compute.manager [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1549.913384] env[68443]: ERROR nova.compute.manager [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1549.913384] env[68443]: ERROR nova.compute.manager [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1549.913384] env[68443]: ERROR nova.compute.manager [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] self._fetch_image_if_missing(context, vi) [ 1549.913384] env[68443]: ERROR nova.compute.manager [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1549.913384] env[68443]: ERROR nova.compute.manager [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] image_cache(vi, tmp_image_ds_loc) [ 1549.913384] env[68443]: ERROR nova.compute.manager [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1549.913796] env[68443]: ERROR nova.compute.manager [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] vm_util.copy_virtual_disk( [ 1549.913796] env[68443]: ERROR nova.compute.manager [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1549.913796] env[68443]: ERROR nova.compute.manager [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] session._wait_for_task(vmdk_copy_task) [ 1549.913796] env[68443]: ERROR nova.compute.manager [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1549.913796] env[68443]: ERROR nova.compute.manager [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] return self.wait_for_task(task_ref) [ 1549.913796] env[68443]: ERROR nova.compute.manager [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1549.913796] env[68443]: ERROR nova.compute.manager [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] return evt.wait() [ 1549.913796] env[68443]: ERROR nova.compute.manager [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1549.913796] env[68443]: ERROR nova.compute.manager [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] result = hub.switch() [ 1549.913796] env[68443]: ERROR nova.compute.manager [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1549.913796] env[68443]: ERROR nova.compute.manager [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] return self.greenlet.switch() [ 1549.913796] env[68443]: ERROR nova.compute.manager [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1549.913796] env[68443]: ERROR nova.compute.manager [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] self.f(*self.args, **self.kw) [ 1549.914222] env[68443]: ERROR nova.compute.manager [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1549.914222] env[68443]: ERROR nova.compute.manager [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] raise exceptions.translate_fault(task_info.error) [ 1549.914222] env[68443]: ERROR nova.compute.manager [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1549.914222] env[68443]: ERROR nova.compute.manager [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Faults: ['InvalidArgument'] [ 1549.914222] env[68443]: ERROR nova.compute.manager [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] [ 1549.914222] env[68443]: DEBUG nova.compute.utils [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] VimFaultException {{(pid=68443) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1549.915555] env[68443]: DEBUG nova.compute.manager [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Build of instance 6e162408-6d3d-42e0-8992-f5843e9e7855 was re-scheduled: A specified parameter was not correct: fileType [ 1549.915555] env[68443]: Faults: ['InvalidArgument'] {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1549.915911] env[68443]: DEBUG nova.compute.manager [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Unplugging VIFs for instance {{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1549.916093] env[68443]: DEBUG nova.compute.manager [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1549.916348] env[68443]: DEBUG nova.compute.manager [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1549.916418] env[68443]: DEBUG nova.network.neutron [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1550.229063] env[68443]: DEBUG nova.network.neutron [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1550.241015] env[68443]: INFO nova.compute.manager [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Took 0.32 seconds to deallocate network for instance. [ 1550.334736] env[68443]: INFO nova.scheduler.client.report [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Deleted allocations for instance 6e162408-6d3d-42e0-8992-f5843e9e7855 [ 1550.360556] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c3477e21-e2c2-4077-9d6c-29d43cadfc09 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Lock "6e162408-6d3d-42e0-8992-f5843e9e7855" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 605.017s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1550.361728] env[68443]: DEBUG oslo_concurrency.lockutils [None req-04b14849-4eb2-419a-857d-63edef1eb131 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Lock "6e162408-6d3d-42e0-8992-f5843e9e7855" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 407.993s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1550.362054] env[68443]: DEBUG oslo_concurrency.lockutils [None req-04b14849-4eb2-419a-857d-63edef1eb131 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Acquiring lock "6e162408-6d3d-42e0-8992-f5843e9e7855-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1550.362285] env[68443]: DEBUG oslo_concurrency.lockutils [None req-04b14849-4eb2-419a-857d-63edef1eb131 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Lock "6e162408-6d3d-42e0-8992-f5843e9e7855-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1550.362449] env[68443]: DEBUG oslo_concurrency.lockutils [None req-04b14849-4eb2-419a-857d-63edef1eb131 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Lock "6e162408-6d3d-42e0-8992-f5843e9e7855-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1550.364477] env[68443]: INFO nova.compute.manager [None req-04b14849-4eb2-419a-857d-63edef1eb131 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Terminating instance [ 1550.366167] env[68443]: DEBUG nova.compute.manager [None req-04b14849-4eb2-419a-857d-63edef1eb131 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1550.366355] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-04b14849-4eb2-419a-857d-63edef1eb131 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1550.366835] env[68443]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4237733f-726a-49b6-95b6-ab8445ca8a0a {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.376489] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91c20afa-cc6a-4929-8913-5335ec004953 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.387432] env[68443]: DEBUG nova.compute.manager [None req-c8575d97-1541-4496-a77b-a66c3e6b7fc4 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] [instance: 04efeff0-d708-4ab6-bd7a-b438bf28c1d7] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1550.408830] env[68443]: WARNING nova.virt.vmwareapi.vmops [None req-04b14849-4eb2-419a-857d-63edef1eb131 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6e162408-6d3d-42e0-8992-f5843e9e7855 could not be found. 
[ 1550.409047] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-04b14849-4eb2-419a-857d-63edef1eb131 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1550.409940] env[68443]: INFO nova.compute.manager [None req-04b14849-4eb2-419a-857d-63edef1eb131 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1550.409940] env[68443]: DEBUG oslo.service.loopingcall [None req-04b14849-4eb2-419a-857d-63edef1eb131 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1550.409940] env[68443]: DEBUG nova.compute.manager [-] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1550.409940] env[68443]: DEBUG nova.network.neutron [-] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1550.419677] env[68443]: DEBUG nova.compute.manager [None req-c8575d97-1541-4496-a77b-a66c3e6b7fc4 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] [instance: 04efeff0-d708-4ab6-bd7a-b438bf28c1d7] Instance disappeared before build. {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1550.438420] env[68443]: DEBUG nova.network.neutron [-] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1550.445194] env[68443]: INFO nova.compute.manager [-] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] Took 0.04 seconds to deallocate network for instance. [ 1550.452023] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c8575d97-1541-4496-a77b-a66c3e6b7fc4 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Lock "04efeff0-d708-4ab6-bd7a-b438bf28c1d7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 204.095s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1550.458715] env[68443]: DEBUG nova.compute.manager [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Starting instance... 
{{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1550.562384] env[68443]: DEBUG oslo_concurrency.lockutils [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1550.562967] env[68443]: DEBUG oslo_concurrency.lockutils [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1550.564217] env[68443]: INFO nova.compute.claims [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1550.578736] env[68443]: DEBUG oslo_concurrency.lockutils [None req-04b14849-4eb2-419a-857d-63edef1eb131 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Lock "6e162408-6d3d-42e0-8992-f5843e9e7855" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.216s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1550.579078] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "6e162408-6d3d-42e0-8992-f5843e9e7855" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 274.486s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1550.580229] env[68443]: INFO nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 6e162408-6d3d-42e0-8992-f5843e9e7855] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1550.580427] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "6e162408-6d3d-42e0-8992-f5843e9e7855" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1550.892828] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52bc0937-0ac4-4c90-8937-755dd7ad86c1 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.900715] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-817b75e9-306a-483e-b615-11b8139660b9 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.930894] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87e72e50-3990-4bf6-a83f-c2438dbd3e4e {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.938444] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2206e654-9821-4809-ab9c-058c0c5a6d91 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.951438] env[68443]: DEBUG nova.compute.provider_tree [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1550.960126] env[68443]: DEBUG nova.scheduler.client.report [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1550.978749] env[68443]: DEBUG oslo_concurrency.lockutils [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.414s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1550.978749] env[68443]: DEBUG nova.compute.manager [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Start building networks asynchronously for instance. 
{{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1551.025275] env[68443]: DEBUG nova.compute.utils [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Using /dev/sd instead of None {{(pid=68443) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1551.027151] env[68443]: DEBUG nova.compute.manager [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Allocating IP information in the background. {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1551.027388] env[68443]: DEBUG nova.network.neutron [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] allocate_for_instance() {{(pid=68443) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1551.035655] env[68443]: DEBUG nova.compute.manager [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Start building block device mappings for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1551.109537] env[68443]: DEBUG nova.policy [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '267f1cc5982049579842611acbadcb85', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6693fbeca44449939d27838029d25353', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68443) authorize /opt/stack/nova/nova/policy.py:203}} [ 1551.113846] env[68443]: DEBUG nova.compute.manager [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Start spawning the instance on the hypervisor. 
{{(pid=68443) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1551.140372] env[68443]: DEBUG nova.virt.hardware [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-08T18:48:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-08T18:48:12Z,direct_url=,disk_format='vmdk',id=a80a8b97-4d56-4702-9b02-9d115fcd6710,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f4f2a1e220914ec3b281775c224df247',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-08T18:48:13Z,virtual_size=,visibility=), allow threads: False {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1551.140650] env[68443]: DEBUG nova.virt.hardware [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Flavor limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1551.140843] env[68443]: DEBUG nova.virt.hardware [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Image limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1551.141041] env[68443]: DEBUG nova.virt.hardware [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Flavor pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1551.141195] env[68443]: DEBUG nova.virt.hardware [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Image pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1551.141341] env[68443]: DEBUG nova.virt.hardware [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1551.141549] env[68443]: DEBUG nova.virt.hardware [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1551.141707] env[68443]: DEBUG nova.virt.hardware [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1551.141869] env[68443]: DEBUG 
nova.virt.hardware [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Got 1 possible topologies {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1551.142179] env[68443]: DEBUG nova.virt.hardware [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1551.142457] env[68443]: DEBUG nova.virt.hardware [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1551.143414] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffcf8765-b49b-4bc4-bf26-9b06f09c3c96 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.152178] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae83a767-02ac-40bc-a709-9d8bc55c97f3 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.546846] env[68443]: DEBUG nova.network.neutron [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Successfully created port: e77d3954-b1b4-4ca6-891e-27cada35e5d7 {{(pid=68443) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1552.163175] env[68443]: DEBUG nova.network.neutron [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Successfully updated port: e77d3954-b1b4-4ca6-891e-27cada35e5d7 {{(pid=68443) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1552.175694] env[68443]: DEBUG oslo_concurrency.lockutils [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Acquiring lock "refresh_cache-7366efe5-c640-4689-97a1-fba0ac431b12" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1552.175838] env[68443]: DEBUG oslo_concurrency.lockutils [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Acquired lock "refresh_cache-7366efe5-c640-4689-97a1-fba0ac431b12" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1552.175983] env[68443]: DEBUG nova.network.neutron [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Building network info cache for instance {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1552.222423] env[68443]: DEBUG nova.network.neutron [None 
req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Instance cache missing network info. {{(pid=68443) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1552.279925] env[68443]: DEBUG nova.compute.manager [req-774a99a1-a9b3-4238-95ad-f0e270cb569b req-d9ae178e-4926-4a02-b9ab-545a55e91202 service nova] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Received event network-vif-plugged-e77d3954-b1b4-4ca6-891e-27cada35e5d7 {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1552.279925] env[68443]: DEBUG oslo_concurrency.lockutils [req-774a99a1-a9b3-4238-95ad-f0e270cb569b req-d9ae178e-4926-4a02-b9ab-545a55e91202 service nova] Acquiring lock "7366efe5-c640-4689-97a1-fba0ac431b12-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1552.279925] env[68443]: DEBUG oslo_concurrency.lockutils [req-774a99a1-a9b3-4238-95ad-f0e270cb569b req-d9ae178e-4926-4a02-b9ab-545a55e91202 service nova] Lock "7366efe5-c640-4689-97a1-fba0ac431b12-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1552.279925] env[68443]: DEBUG oslo_concurrency.lockutils [req-774a99a1-a9b3-4238-95ad-f0e270cb569b req-d9ae178e-4926-4a02-b9ab-545a55e91202 service nova] Lock "7366efe5-c640-4689-97a1-fba0ac431b12-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1552.280070] env[68443]: DEBUG nova.compute.manager [req-774a99a1-a9b3-4238-95ad-f0e270cb569b req-d9ae178e-4926-4a02-b9ab-545a55e91202 service nova] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] No waiting events found dispatching network-vif-plugged-e77d3954-b1b4-4ca6-891e-27cada35e5d7 {{(pid=68443) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1552.280070] env[68443]: WARNING nova.compute.manager [req-774a99a1-a9b3-4238-95ad-f0e270cb569b req-d9ae178e-4926-4a02-b9ab-545a55e91202 service nova] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Received unexpected event network-vif-plugged-e77d3954-b1b4-4ca6-891e-27cada35e5d7 for instance with vm_state building and task_state spawning. [ 1552.280132] env[68443]: DEBUG nova.compute.manager [req-774a99a1-a9b3-4238-95ad-f0e270cb569b req-d9ae178e-4926-4a02-b9ab-545a55e91202 service nova] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Received event network-changed-e77d3954-b1b4-4ca6-891e-27cada35e5d7 {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1552.280286] env[68443]: DEBUG nova.compute.manager [req-774a99a1-a9b3-4238-95ad-f0e270cb569b req-d9ae178e-4926-4a02-b9ab-545a55e91202 service nova] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Refreshing instance network info cache due to event network-changed-e77d3954-b1b4-4ca6-891e-27cada35e5d7. 
{{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1552.280601] env[68443]: DEBUG oslo_concurrency.lockutils [req-774a99a1-a9b3-4238-95ad-f0e270cb569b req-d9ae178e-4926-4a02-b9ab-545a55e91202 service nova] Acquiring lock "refresh_cache-7366efe5-c640-4689-97a1-fba0ac431b12" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1552.393862] env[68443]: DEBUG nova.network.neutron [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Updating instance_info_cache with network_info: [{"id": "e77d3954-b1b4-4ca6-891e-27cada35e5d7", "address": "fa:16:3e:18:60:59", "network": {"id": "37eeb189-6d46-4f68-ab89-f71102bdc722", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-999532111-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6693fbeca44449939d27838029d25353", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape77d3954-b1", "ovs_interfaceid": "e77d3954-b1b4-4ca6-891e-27cada35e5d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1552.405090] env[68443]: DEBUG oslo_concurrency.lockutils [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Releasing lock "refresh_cache-7366efe5-c640-4689-97a1-fba0ac431b12" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1552.405407] env[68443]: DEBUG nova.compute.manager [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Instance network_info: |[{"id": "e77d3954-b1b4-4ca6-891e-27cada35e5d7", "address": "fa:16:3e:18:60:59", "network": {"id": "37eeb189-6d46-4f68-ab89-f71102bdc722", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-999532111-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6693fbeca44449939d27838029d25353", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 
203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape77d3954-b1", "ovs_interfaceid": "e77d3954-b1b4-4ca6-891e-27cada35e5d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1552.405746] env[68443]: DEBUG oslo_concurrency.lockutils [req-774a99a1-a9b3-4238-95ad-f0e270cb569b req-d9ae178e-4926-4a02-b9ab-545a55e91202 service nova] Acquired lock "refresh_cache-7366efe5-c640-4689-97a1-fba0ac431b12" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1552.405878] env[68443]: DEBUG nova.network.neutron [req-774a99a1-a9b3-4238-95ad-f0e270cb569b req-d9ae178e-4926-4a02-b9ab-545a55e91202 service nova] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Refreshing network info cache for port e77d3954-b1b4-4ca6-891e-27cada35e5d7 {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1552.406905] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:18:60:59', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '40c947c4-f471-4d48-8e43-fee54198107e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e77d3954-b1b4-4ca6-891e-27cada35e5d7', 'vif_model': 'vmxnet3'}] {{(pid=68443) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1552.415011] env[68443]: DEBUG oslo.service.loopingcall [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1552.417975] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Creating VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1552.418779] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3f04dd0d-fa98-4788-8bc1-776dab95b502 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.438114] env[68443]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1552.438114] env[68443]: value = "task-3374022" [ 1552.438114] env[68443]: _type = "Task" [ 1552.438114] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1552.445433] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3374022, 'name': CreateVM_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.680473] env[68443]: DEBUG nova.network.neutron [req-774a99a1-a9b3-4238-95ad-f0e270cb569b req-d9ae178e-4926-4a02-b9ab-545a55e91202 service nova] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Updated VIF entry in instance network info cache for port e77d3954-b1b4-4ca6-891e-27cada35e5d7. 
{{(pid=68443) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1552.680473] env[68443]: DEBUG nova.network.neutron [req-774a99a1-a9b3-4238-95ad-f0e270cb569b req-d9ae178e-4926-4a02-b9ab-545a55e91202 service nova] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Updating instance_info_cache with network_info: [{"id": "e77d3954-b1b4-4ca6-891e-27cada35e5d7", "address": "fa:16:3e:18:60:59", "network": {"id": "37eeb189-6d46-4f68-ab89-f71102bdc722", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-999532111-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6693fbeca44449939d27838029d25353", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape77d3954-b1", "ovs_interfaceid": "e77d3954-b1b4-4ca6-891e-27cada35e5d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1552.692355] env[68443]: DEBUG oslo_concurrency.lockutils [req-774a99a1-a9b3-4238-95ad-f0e270cb569b req-d9ae178e-4926-4a02-b9ab-545a55e91202 service nova] Releasing lock "refresh_cache-7366efe5-c640-4689-97a1-fba0ac431b12" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1552.949109] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3374022, 'name': CreateVM_Task, 'duration_secs': 0.277881} completed successfully. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1552.949109] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Created VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1552.949361] env[68443]: DEBUG oslo_concurrency.lockutils [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1552.949516] env[68443]: DEBUG oslo_concurrency.lockutils [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1552.949841] env[68443]: DEBUG oslo_concurrency.lockutils [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1552.950094] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-120bc064-bd70-428e-bdd0-c54b9354d7ae {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.954487] env[68443]: DEBUG oslo_vmware.api [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Waiting for the task: (returnval){ [ 1552.954487] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]52bb8cab-58cf-0681-d3d4-0d977f3d8feb" [ 1552.954487] env[68443]: _type = "Task" [ 1552.954487] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1552.961561] env[68443]: DEBUG oslo_vmware.api [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]52bb8cab-58cf-0681-d3d4-0d977f3d8feb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.465247] env[68443]: DEBUG oslo_concurrency.lockutils [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1553.465514] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Processing image a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1553.465728] env[68443]: DEBUG oslo_concurrency.lockutils [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1554.524576] env[68443]: DEBUG oslo_concurrency.lockutils [None req-6ad356a3-8b34-48aa-a724-763e158d9d7b tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Acquiring lock "7366efe5-c640-4689-97a1-fba0ac431b12" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1565.827075] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1565.827361] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Cleaning up deleted instances {{(pid=68443) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11219}} [ 1565.838410] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] There are 0 instances to clean {{(pid=68443) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 1566.836093] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1567.825726] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1568.825761] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1569.825594] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 
None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1570.825644] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1570.826072] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Starting heal instance info cache {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1570.826072] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Rebuilding the list of instances to heal {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1570.858739] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1570.858906] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1570.859043] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1570.859180] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1570.859305] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1570.859430] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: a4708485-db53-416e-94be-f9a017eb28c4] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1570.859549] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1570.859670] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Skipping network cache update for instance because it is Building. 
{{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1570.859791] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1570.859906] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1570.860034] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Didn't find any instances for network info cache update. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1570.862525] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1570.862702] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Cleaning up deleted instances with incomplete migration {{(pid=68443) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11257}} [ 1571.834363] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1571.834924] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68443) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1573.825072] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1573.825451] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1573.837417] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1573.837694] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1573.837916] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1573.838127] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68443) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1573.839398] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5964f21e-19bc-462c-8c48-1913dad1b9e2 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.848556] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e596ef0-153d-4961-b2d8-a1b05078aebf {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.862270] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62d67bbf-5219-4005-9bbc-cd38a5c2758d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.868443] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd718bd2-d8a3-484d-84b4-46055ccf3b95 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.896714] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180964MB free_disk=105GB free_vcpus=48 pci_devices=None {{(pid=68443) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1573.896858] env[68443]: DEBUG 
oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1573.897053] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1574.057206] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance fd0de9a2-7a54-46be-8b6a-3415366e110c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1574.057382] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance bcdc4f46-810d-4ed7-84f1-2db2c318f920 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1574.057509] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 91fd9c10-db96-4366-9548-13b36f94db6b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1574.057630] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 75e3d9b0-4317-4e6e-9f2b-d32134f7223f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1574.057750] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1574.057867] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance a4708485-db53-416e-94be-f9a017eb28c4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1574.058047] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 18bae6a1-3bd0-4749-8795-5b8ccd18193f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1574.058180] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 99b16cd5-beb0-4f71-8011-411b84ddf497 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1574.058299] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1574.058413] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 7366efe5-c640-4689-97a1-fba0ac431b12 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1574.070433] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance db9a6b36-6c53-4769-b93e-3c38b95533d6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1574.080227] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 91e6d317-9322-4938-a1da-f88d36499c7b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1574.089472] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 04fc458a-a928-43ef-8fd0-bfc49989d2b1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1574.098898] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 2985403d-348f-473d-ad1f-75fb67d3be12 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1574.107962] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 91194294-6f8a-4067-a7ed-610c9da3aec8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1574.118774] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 409e47f5-dea2-43a4-9ab6-475dc09fafb4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1574.127635] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 0a9bb99d-8f94-4f26-990e-a57aac09c328 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1574.136573] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance b8c2916e-3b70-42c9-9f85-ee8582c636b8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1574.136773] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1574.136919] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1574.153657] env[68443]: DEBUG nova.scheduler.client.report [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Refreshing inventories for resource provider feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1574.168533] env[68443]: DEBUG nova.scheduler.client.report [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Updating ProviderTree inventory for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1574.168753] env[68443]: DEBUG nova.compute.provider_tree [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Updating inventory in ProviderTree for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1574.181148] env[68443]: DEBUG nova.scheduler.client.report [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Refreshing aggregate associations for resource provider feda0f0b-e324-4b78-af74-5e6cfd355a37, aggregates: None {{(pid=68443) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1574.198935] env[68443]: DEBUG nova.scheduler.client.report [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Refreshing trait associations for resource provider feda0f0b-e324-4b78-af74-5e6cfd355a37, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=68443) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1574.408119] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96ed8461-1d6e-46e2-a26e-09ac0e2f6716 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.415623] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-d5d8ff69-b793-477b-9b12-d8ad4efaaddd {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.444881] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f061323-fc5c-42da-9a5d-fac245970a60 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.451100] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-879f8199-9c28-4a5d-a4b8-a72f50810b83 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.463923] env[68443]: DEBUG nova.compute.provider_tree [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1574.473054] env[68443]: DEBUG nova.scheduler.client.report [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1574.486321] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68443) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1574.486321] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.589s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1574.486518] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1576.489793] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1599.076181] env[68443]: WARNING oslo_vmware.rw_handles [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1599.076181] env[68443]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1599.076181] env[68443]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1599.076181] env[68443]: 
ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1599.076181] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1599.076181] env[68443]: ERROR oslo_vmware.rw_handles response.begin() [ 1599.076181] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1599.076181] env[68443]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1599.076181] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1599.076181] env[68443]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1599.076181] env[68443]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1599.076181] env[68443]: ERROR oslo_vmware.rw_handles [ 1599.076749] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Downloaded image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to vmware_temp/56b60719-e4b9-45e8-a655-dc2e6149d860/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1599.079058] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Caching image {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1599.079267] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Copying Virtual Disk [datastore1] vmware_temp/56b60719-e4b9-45e8-a655-dc2e6149d860/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk to [datastore1] vmware_temp/56b60719-e4b9-45e8-a655-dc2e6149d860/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk {{(pid=68443) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1599.079588] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d5f039af-fa47-441f-a790-76ae4daf0ff5 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.089082] env[68443]: DEBUG oslo_vmware.api [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Waiting for the task: (returnval){ [ 1599.089082] env[68443]: value = "task-3374023" [ 1599.089082] env[68443]: _type = "Task" [ 1599.089082] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1599.096967] env[68443]: DEBUG oslo_vmware.api [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Task: {'id': task-3374023, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.599808] env[68443]: DEBUG oslo_vmware.exceptions [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Fault InvalidArgument not matched. {{(pid=68443) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1599.600198] env[68443]: DEBUG oslo_concurrency.lockutils [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1599.600742] env[68443]: ERROR nova.compute.manager [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1599.600742] env[68443]: Faults: ['InvalidArgument'] [ 1599.600742] env[68443]: ERROR nova.compute.manager [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Traceback (most recent call last): [ 1599.600742] env[68443]: ERROR nova.compute.manager [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1599.600742] env[68443]: ERROR nova.compute.manager [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] yield resources [ 1599.600742] env[68443]: ERROR nova.compute.manager [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1599.600742] env[68443]: ERROR nova.compute.manager [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] self.driver.spawn(context, instance, image_meta, [ 1599.600742] env[68443]: ERROR nova.compute.manager [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1599.600742] env[68443]: ERROR nova.compute.manager [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1599.600742] env[68443]: ERROR nova.compute.manager [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1599.600742] env[68443]: ERROR nova.compute.manager [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] self._fetch_image_if_missing(context, vi) [ 1599.600742] env[68443]: ERROR nova.compute.manager [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1599.601067] env[68443]: ERROR nova.compute.manager [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] image_cache(vi, tmp_image_ds_loc) [ 1599.601067] env[68443]: ERROR nova.compute.manager [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1599.601067] env[68443]: ERROR nova.compute.manager [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] vm_util.copy_virtual_disk( [ 1599.601067] env[68443]: ERROR nova.compute.manager [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] File 
"/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1599.601067] env[68443]: ERROR nova.compute.manager [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] session._wait_for_task(vmdk_copy_task) [ 1599.601067] env[68443]: ERROR nova.compute.manager [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1599.601067] env[68443]: ERROR nova.compute.manager [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] return self.wait_for_task(task_ref) [ 1599.601067] env[68443]: ERROR nova.compute.manager [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1599.601067] env[68443]: ERROR nova.compute.manager [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] return evt.wait() [ 1599.601067] env[68443]: ERROR nova.compute.manager [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1599.601067] env[68443]: ERROR nova.compute.manager [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] result = hub.switch() [ 1599.601067] env[68443]: ERROR nova.compute.manager [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1599.601067] env[68443]: ERROR nova.compute.manager [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] return self.greenlet.switch() [ 1599.601556] env[68443]: ERROR nova.compute.manager [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1599.601556] env[68443]: ERROR nova.compute.manager [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] self.f(*self.args, **self.kw) [ 1599.601556] env[68443]: ERROR nova.compute.manager [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1599.601556] env[68443]: ERROR nova.compute.manager [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] raise exceptions.translate_fault(task_info.error) [ 1599.601556] env[68443]: ERROR nova.compute.manager [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1599.601556] env[68443]: ERROR nova.compute.manager [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Faults: ['InvalidArgument'] [ 1599.601556] env[68443]: ERROR nova.compute.manager [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] [ 1599.601556] env[68443]: INFO nova.compute.manager [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Terminating instance [ 1599.602772] env[68443]: DEBUG oslo_concurrency.lockutils [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1599.602882] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 
tempest-AttachInterfacesTestJSON-2068282523-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1599.603512] env[68443]: DEBUG nova.compute.manager [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1599.603692] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1599.603923] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0a6380cb-d82e-4c46-a2f6-fdba15fc3445 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.606328] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9d7f6af-e140-46f3-88a8-7f996dc2a368 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.612888] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Unregistering the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1599.613111] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-463106d7-ef3d-4dda-a91a-ebad0bea33bb {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.615178] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1599.615358] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68443) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1599.616324] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aefae181-17fb-4b07-8d1c-96458cae6415 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.621547] env[68443]: DEBUG oslo_vmware.api [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Waiting for the task: (returnval){ [ 1599.621547] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]52c986af-235a-85e8-367c-6d2499dfa219" [ 1599.621547] env[68443]: _type = "Task" [ 1599.621547] env[68443]: } to complete. 
{{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1599.628805] env[68443]: DEBUG oslo_vmware.api [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]52c986af-235a-85e8-367c-6d2499dfa219, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.923905] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Unregistered the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1599.924138] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Deleting contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1599.924320] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Deleting the datastore file [datastore1] fd0de9a2-7a54-46be-8b6a-3415366e110c {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1599.924589] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-11bcccd0-8089-4b33-a0cf-9db6c26a3d5a {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.931313] env[68443]: DEBUG oslo_vmware.api [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Waiting for the task: (returnval){ [ 1599.931313] env[68443]: value = "task-3374025" [ 1599.931313] env[68443]: _type = "Task" [ 1599.931313] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1599.938858] env[68443]: DEBUG oslo_vmware.api [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Task: {'id': task-3374025, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1600.131573] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Preparing fetch location {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1600.131846] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Creating directory with path [datastore1] vmware_temp/98a9daf9-4632-4e66-9585-453ca9d9721a/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1600.132076] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-881ac039-b049-4fc5-8268-737f367a3211 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.143187] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Created directory with path [datastore1] vmware_temp/98a9daf9-4632-4e66-9585-453ca9d9721a/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1600.143390] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Fetch image to [datastore1] vmware_temp/98a9daf9-4632-4e66-9585-453ca9d9721a/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1600.143565] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to [datastore1] vmware_temp/98a9daf9-4632-4e66-9585-453ca9d9721a/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1600.144321] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d250c95e-4b5f-49c3-b060-a1b9e78704a2 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.151093] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fe1b9b1-7908-4f02-a9c7-330808ce6a52 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.160353] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ca05435-1b4f-450b-9009-8ee61495c3f0 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.194943] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-a1c7955a-0317-4cfb-a5ad-a7a6dc642276 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.201288] env[68443]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-6e5dd02c-5270-43e5-93f6-2b1bfd1d9aed {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.221195] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1600.274853] env[68443]: DEBUG oslo_vmware.rw_handles [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/98a9daf9-4632-4e66-9585-453ca9d9721a/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68443) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1600.335124] env[68443]: DEBUG oslo_vmware.rw_handles [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Completed reading data from the image iterator. {{(pid=68443) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1600.335333] env[68443]: DEBUG oslo_vmware.rw_handles [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/98a9daf9-4632-4e66-9585-453ca9d9721a/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68443) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1600.441376] env[68443]: DEBUG oslo_vmware.api [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Task: {'id': task-3374025, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077012} completed successfully. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1600.441565] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Deleted the datastore file {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1600.441748] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Deleted contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1600.441927] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1600.442117] env[68443]: INFO nova.compute.manager [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Took 0.84 seconds to destroy the instance on the hypervisor. [ 1600.444372] env[68443]: DEBUG nova.compute.claims [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Aborting claim: {{(pid=68443) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1600.444546] env[68443]: DEBUG oslo_concurrency.lockutils [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1600.444776] env[68443]: DEBUG oslo_concurrency.lockutils [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1600.667480] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-410a7c14-1e97-4f12-b73e-52b00f07eb5d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.674747] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a885098-ea42-4634-8c9d-c051eccdffff {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.703848] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc17635c-bdbb-49fa-82b4-0d5a3e4be5b6 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.710681] env[68443]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c55a1b2-740b-4159-90ac-959f8d5c8ae8 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.723493] env[68443]: DEBUG nova.compute.provider_tree [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1600.733691] env[68443]: DEBUG nova.scheduler.client.report [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1600.747559] env[68443]: DEBUG oslo_concurrency.lockutils [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.303s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1600.748079] env[68443]: ERROR nova.compute.manager [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1600.748079] env[68443]: Faults: ['InvalidArgument'] [ 1600.748079] env[68443]: ERROR nova.compute.manager [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Traceback (most recent call last): [ 1600.748079] env[68443]: ERROR nova.compute.manager [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1600.748079] env[68443]: ERROR nova.compute.manager [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] self.driver.spawn(context, instance, image_meta, [ 1600.748079] env[68443]: ERROR nova.compute.manager [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1600.748079] env[68443]: ERROR nova.compute.manager [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1600.748079] env[68443]: ERROR nova.compute.manager [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1600.748079] env[68443]: ERROR nova.compute.manager [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] self._fetch_image_if_missing(context, vi) [ 1600.748079] env[68443]: ERROR nova.compute.manager [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing 
[ 1600.748079] env[68443]: ERROR nova.compute.manager [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] image_cache(vi, tmp_image_ds_loc) [ 1600.748079] env[68443]: ERROR nova.compute.manager [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1600.748457] env[68443]: ERROR nova.compute.manager [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] vm_util.copy_virtual_disk( [ 1600.748457] env[68443]: ERROR nova.compute.manager [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1600.748457] env[68443]: ERROR nova.compute.manager [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] session._wait_for_task(vmdk_copy_task) [ 1600.748457] env[68443]: ERROR nova.compute.manager [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1600.748457] env[68443]: ERROR nova.compute.manager [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] return self.wait_for_task(task_ref) [ 1600.748457] env[68443]: ERROR nova.compute.manager [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1600.748457] env[68443]: ERROR nova.compute.manager [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] return evt.wait() [ 1600.748457] env[68443]: ERROR nova.compute.manager [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1600.748457] env[68443]: ERROR nova.compute.manager [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] result = hub.switch() [ 1600.748457] env[68443]: ERROR nova.compute.manager [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1600.748457] env[68443]: ERROR nova.compute.manager [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] return self.greenlet.switch() [ 1600.748457] env[68443]: ERROR nova.compute.manager [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1600.748457] env[68443]: ERROR nova.compute.manager [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] self.f(*self.args, **self.kw) [ 1600.748817] env[68443]: ERROR nova.compute.manager [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1600.748817] env[68443]: ERROR nova.compute.manager [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] raise exceptions.translate_fault(task_info.error) [ 1600.748817] env[68443]: ERROR nova.compute.manager [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1600.748817] env[68443]: ERROR nova.compute.manager [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Faults: ['InvalidArgument'] [ 1600.748817] env[68443]: ERROR nova.compute.manager [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] [ 1600.748817] env[68443]: DEBUG nova.compute.utils [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] VimFaultException {{(pid=68443) notify_about_instance_usage 
/opt/stack/nova/nova/compute/utils.py:430}} [ 1600.750121] env[68443]: DEBUG nova.compute.manager [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Build of instance fd0de9a2-7a54-46be-8b6a-3415366e110c was re-scheduled: A specified parameter was not correct: fileType [ 1600.750121] env[68443]: Faults: ['InvalidArgument'] {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1600.750481] env[68443]: DEBUG nova.compute.manager [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Unplugging VIFs for instance {{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1600.750653] env[68443]: DEBUG nova.compute.manager [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1600.750825] env[68443]: DEBUG nova.compute.manager [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1600.750988] env[68443]: DEBUG nova.network.neutron [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1601.071887] env[68443]: DEBUG nova.network.neutron [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1601.084344] env[68443]: INFO nova.compute.manager [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Took 0.33 seconds to deallocate network for instance. 
[ 1601.176730] env[68443]: INFO nova.scheduler.client.report [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Deleted allocations for instance fd0de9a2-7a54-46be-8b6a-3415366e110c [ 1601.198684] env[68443]: DEBUG oslo_concurrency.lockutils [None req-983d8635-665e-4c4a-834f-3adb0f8c6f07 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Lock "fd0de9a2-7a54-46be-8b6a-3415366e110c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 639.158s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1601.200160] env[68443]: DEBUG oslo_concurrency.lockutils [None req-b7951873-8a4a-4e87-9d38-a6460e441320 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Lock "fd0de9a2-7a54-46be-8b6a-3415366e110c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 441.179s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1601.200371] env[68443]: DEBUG oslo_concurrency.lockutils [None req-b7951873-8a4a-4e87-9d38-a6460e441320 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Acquiring lock "fd0de9a2-7a54-46be-8b6a-3415366e110c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1601.200582] env[68443]: DEBUG oslo_concurrency.lockutils [None req-b7951873-8a4a-4e87-9d38-a6460e441320 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Lock "fd0de9a2-7a54-46be-8b6a-3415366e110c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1601.200746] env[68443]: DEBUG oslo_concurrency.lockutils [None req-b7951873-8a4a-4e87-9d38-a6460e441320 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Lock "fd0de9a2-7a54-46be-8b6a-3415366e110c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1601.202869] env[68443]: INFO nova.compute.manager [None req-b7951873-8a4a-4e87-9d38-a6460e441320 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Terminating instance [ 1601.204691] env[68443]: DEBUG nova.compute.manager [None req-b7951873-8a4a-4e87-9d38-a6460e441320 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Start destroying the instance on the hypervisor. 
{{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1601.204858] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-b7951873-8a4a-4e87-9d38-a6460e441320 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1601.205390] env[68443]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b31dba4e-610c-47c9-80b4-7aab7f32811d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.214470] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82ad167c-5277-4940-a495-c6d688497fb5 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.225697] env[68443]: DEBUG nova.compute.manager [None req-d6998428-1eaa-40e5-8ef3-f54e2672bcb6 tempest-ServerShowV257Test-1338230860 tempest-ServerShowV257Test-1338230860-project-member] [instance: 0dd99061-79ed-4348-9a31-7980d6ea5db6] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1601.247457] env[68443]: WARNING nova.virt.vmwareapi.vmops [None req-b7951873-8a4a-4e87-9d38-a6460e441320 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance fd0de9a2-7a54-46be-8b6a-3415366e110c could not be found. [ 1601.247457] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-b7951873-8a4a-4e87-9d38-a6460e441320 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1601.247457] env[68443]: INFO nova.compute.manager [None req-b7951873-8a4a-4e87-9d38-a6460e441320 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1601.247457] env[68443]: DEBUG oslo.service.loopingcall [None req-b7951873-8a4a-4e87-9d38-a6460e441320 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1601.248300] env[68443]: DEBUG nova.compute.manager [-] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1601.248300] env[68443]: DEBUG nova.network.neutron [-] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1601.250422] env[68443]: DEBUG nova.compute.manager [None req-d6998428-1eaa-40e5-8ef3-f54e2672bcb6 tempest-ServerShowV257Test-1338230860 tempest-ServerShowV257Test-1338230860-project-member] [instance: 0dd99061-79ed-4348-9a31-7980d6ea5db6] Instance disappeared before build. 
{{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1601.273381] env[68443]: DEBUG nova.network.neutron [-] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1601.274921] env[68443]: DEBUG oslo_concurrency.lockutils [None req-d6998428-1eaa-40e5-8ef3-f54e2672bcb6 tempest-ServerShowV257Test-1338230860 tempest-ServerShowV257Test-1338230860-project-member] Lock "0dd99061-79ed-4348-9a31-7980d6ea5db6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 240.104s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1601.285517] env[68443]: INFO nova.compute.manager [-] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] Took 0.04 seconds to deallocate network for instance. [ 1601.288260] env[68443]: DEBUG nova.compute.manager [None req-4d0a7f73-0242-487f-8f9e-a02ac9398c39 tempest-AttachVolumeNegativeTest-1393756799 tempest-AttachVolumeNegativeTest-1393756799-project-member] [instance: 5943fe10-a829-4142-a4ae-c6035fe5f4e8] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1601.314117] env[68443]: DEBUG nova.compute.manager [None req-4d0a7f73-0242-487f-8f9e-a02ac9398c39 tempest-AttachVolumeNegativeTest-1393756799 tempest-AttachVolumeNegativeTest-1393756799-project-member] [instance: 5943fe10-a829-4142-a4ae-c6035fe5f4e8] Instance disappeared before build. {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1601.336306] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4d0a7f73-0242-487f-8f9e-a02ac9398c39 tempest-AttachVolumeNegativeTest-1393756799 tempest-AttachVolumeNegativeTest-1393756799-project-member] Lock "5943fe10-a829-4142-a4ae-c6035fe5f4e8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 230.135s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1601.347240] env[68443]: DEBUG nova.compute.manager [None req-f8f671bb-d719-4c2c-9d33-da95afa70cda tempest-ServerRescueTestJSON-902940739 tempest-ServerRescueTestJSON-902940739-project-member] [instance: db9a6b36-6c53-4769-b93e-3c38b95533d6] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1601.375372] env[68443]: DEBUG nova.compute.manager [None req-f8f671bb-d719-4c2c-9d33-da95afa70cda tempest-ServerRescueTestJSON-902940739 tempest-ServerRescueTestJSON-902940739-project-member] [instance: db9a6b36-6c53-4769-b93e-3c38b95533d6] Instance disappeared before build. 
{{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1601.402040] env[68443]: DEBUG oslo_concurrency.lockutils [None req-f8f671bb-d719-4c2c-9d33-da95afa70cda tempest-ServerRescueTestJSON-902940739 tempest-ServerRescueTestJSON-902940739-project-member] Lock "db9a6b36-6c53-4769-b93e-3c38b95533d6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 222.126s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1601.403857] env[68443]: DEBUG oslo_concurrency.lockutils [None req-b7951873-8a4a-4e87-9d38-a6460e441320 tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Lock "fd0de9a2-7a54-46be-8b6a-3415366e110c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.204s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1601.404934] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "fd0de9a2-7a54-46be-8b6a-3415366e110c" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 325.312s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1601.405257] env[68443]: INFO nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: fd0de9a2-7a54-46be-8b6a-3415366e110c] During sync_power_state the instance has a pending task (deleting). Skip. [ 1601.405531] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "fd0de9a2-7a54-46be-8b6a-3415366e110c" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1601.411366] env[68443]: DEBUG nova.compute.manager [None req-bfcc8c0d-38f3-4400-9622-5338e770e1eb tempest-AttachVolumeTestJSON-1284107966 tempest-AttachVolumeTestJSON-1284107966-project-member] [instance: 91e6d317-9322-4938-a1da-f88d36499c7b] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1601.437308] env[68443]: DEBUG nova.compute.manager [None req-bfcc8c0d-38f3-4400-9622-5338e770e1eb tempest-AttachVolumeTestJSON-1284107966 tempest-AttachVolumeTestJSON-1284107966-project-member] [instance: 91e6d317-9322-4938-a1da-f88d36499c7b] Instance disappeared before build. {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1601.457576] env[68443]: DEBUG oslo_concurrency.lockutils [None req-bfcc8c0d-38f3-4400-9622-5338e770e1eb tempest-AttachVolumeTestJSON-1284107966 tempest-AttachVolumeTestJSON-1284107966-project-member] Lock "91e6d317-9322-4938-a1da-f88d36499c7b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 207.931s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1601.466901] env[68443]: DEBUG nova.compute.manager [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Starting instance... 
{{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1601.517854] env[68443]: DEBUG oslo_concurrency.lockutils [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1601.518048] env[68443]: DEBUG oslo_concurrency.lockutils [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1601.519693] env[68443]: INFO nova.compute.claims [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1601.733043] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be003046-b6d4-443f-b999-c1edabca8c75 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.740695] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41382214-b095-448f-9c2a-fe9131450548 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.768987] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f63c1d2-4b65-4588-8dd1-feb84a6fba10 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.775555] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db7fc1a7-3a7f-48b8-947b-21925feae177 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.787895] env[68443]: DEBUG nova.compute.provider_tree [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1601.797694] env[68443]: DEBUG nova.scheduler.client.report [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1601.810386] env[68443]: DEBUG 
oslo_concurrency.lockutils [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.292s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1601.810839] env[68443]: DEBUG nova.compute.manager [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Start building networks asynchronously for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1601.844398] env[68443]: DEBUG nova.compute.utils [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Using /dev/sd instead of None {{(pid=68443) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1601.845819] env[68443]: DEBUG nova.compute.manager [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Allocating IP information in the background. {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1601.845990] env[68443]: DEBUG nova.network.neutron [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] allocate_for_instance() {{(pid=68443) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1601.855207] env[68443]: DEBUG nova.compute.manager [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Start building block device mappings for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1601.901626] env[68443]: DEBUG nova.policy [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8eb39994728c486ab572c6fd7acd1bb1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b9d22d78a3f8410c858ba3f85fb453c3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68443) authorize /opt/stack/nova/nova/policy.py:203}} [ 1601.916239] env[68443]: DEBUG nova.compute.manager [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Start spawning the instance on the hypervisor. 
{{(pid=68443) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1601.941154] env[68443]: DEBUG nova.virt.hardware [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-08T18:48:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-08T18:48:12Z,direct_url=,disk_format='vmdk',id=a80a8b97-4d56-4702-9b02-9d115fcd6710,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f4f2a1e220914ec3b281775c224df247',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-08T18:48:13Z,virtual_size=,visibility=), allow threads: False {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1601.941391] env[68443]: DEBUG nova.virt.hardware [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Flavor limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1601.941565] env[68443]: DEBUG nova.virt.hardware [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Image limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1601.941761] env[68443]: DEBUG nova.virt.hardware [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Flavor pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1601.941924] env[68443]: DEBUG nova.virt.hardware [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Image pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1601.942060] env[68443]: DEBUG nova.virt.hardware [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1601.942271] env[68443]: DEBUG nova.virt.hardware [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1601.942428] env[68443]: DEBUG nova.virt.hardware [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1601.942588] 
env[68443]: DEBUG nova.virt.hardware [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Got 1 possible topologies {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1601.942749] env[68443]: DEBUG nova.virt.hardware [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1601.942923] env[68443]: DEBUG nova.virt.hardware [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1601.943809] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b8b46d5-4c41-49d9-b1dc-eac3c4ee6bdf {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.951273] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0c9972f-64f7-4d1e-a9b8-97a6771c4ad9 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.214361] env[68443]: DEBUG nova.network.neutron [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Successfully created port: b6261bb4-f4b9-4c9c-8e77-a4a9a4ba312b {{(pid=68443) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1602.824567] env[68443]: DEBUG nova.network.neutron [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Successfully updated port: b6261bb4-f4b9-4c9c-8e77-a4a9a4ba312b {{(pid=68443) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1602.832748] env[68443]: DEBUG oslo_concurrency.lockutils [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Acquiring lock "refresh_cache-04fc458a-a928-43ef-8fd0-bfc49989d2b1" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1602.832883] env[68443]: DEBUG oslo_concurrency.lockutils [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Acquired lock "refresh_cache-04fc458a-a928-43ef-8fd0-bfc49989d2b1" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1602.833040] env[68443]: DEBUG nova.network.neutron [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Building network info cache for instance {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1602.876976] env[68443]: DEBUG 
nova.network.neutron [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Instance cache missing network info. {{(pid=68443) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1603.034149] env[68443]: DEBUG nova.network.neutron [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Updating instance_info_cache with network_info: [{"id": "b6261bb4-f4b9-4c9c-8e77-a4a9a4ba312b", "address": "fa:16:3e:e3:f9:b9", "network": {"id": "ffd7317a-996b-48eb-8ed3-08829bc8b488", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1411254965-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b9d22d78a3f8410c858ba3f85fb453c3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f4399275-8e92-4448-be9e-d4984e93e89c", "external-id": "nsx-vlan-transportzone-192", "segmentation_id": 192, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb6261bb4-f4", "ovs_interfaceid": "b6261bb4-f4b9-4c9c-8e77-a4a9a4ba312b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1603.045848] env[68443]: DEBUG oslo_concurrency.lockutils [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Releasing lock "refresh_cache-04fc458a-a928-43ef-8fd0-bfc49989d2b1" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1603.046165] env[68443]: DEBUG nova.compute.manager [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Instance network_info: |[{"id": "b6261bb4-f4b9-4c9c-8e77-a4a9a4ba312b", "address": "fa:16:3e:e3:f9:b9", "network": {"id": "ffd7317a-996b-48eb-8ed3-08829bc8b488", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1411254965-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b9d22d78a3f8410c858ba3f85fb453c3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f4399275-8e92-4448-be9e-d4984e93e89c", "external-id": "nsx-vlan-transportzone-192", "segmentation_id": 192, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb6261bb4-f4", 
"ovs_interfaceid": "b6261bb4-f4b9-4c9c-8e77-a4a9a4ba312b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1603.046573] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e3:f9:b9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f4399275-8e92-4448-be9e-d4984e93e89c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b6261bb4-f4b9-4c9c-8e77-a4a9a4ba312b', 'vif_model': 'vmxnet3'}] {{(pid=68443) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1603.054390] env[68443]: DEBUG oslo.service.loopingcall [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1603.054773] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Creating VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1603.055016] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1680c486-5594-4c4f-907a-1984626b0183 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.075462] env[68443]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1603.075462] env[68443]: value = "task-3374026" [ 1603.075462] env[68443]: _type = "Task" [ 1603.075462] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1603.083905] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3374026, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1603.110680] env[68443]: DEBUG nova.compute.manager [req-bcea58a2-26a5-480d-85e8-531e3c30c139 req-12301b36-b966-4df1-9bfd-4e232c26744b service nova] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Received event network-vif-plugged-b6261bb4-f4b9-4c9c-8e77-a4a9a4ba312b {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1603.110680] env[68443]: DEBUG oslo_concurrency.lockutils [req-bcea58a2-26a5-480d-85e8-531e3c30c139 req-12301b36-b966-4df1-9bfd-4e232c26744b service nova] Acquiring lock "04fc458a-a928-43ef-8fd0-bfc49989d2b1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1603.110793] env[68443]: DEBUG oslo_concurrency.lockutils [req-bcea58a2-26a5-480d-85e8-531e3c30c139 req-12301b36-b966-4df1-9bfd-4e232c26744b service nova] Lock "04fc458a-a928-43ef-8fd0-bfc49989d2b1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1603.110965] env[68443]: DEBUG oslo_concurrency.lockutils [req-bcea58a2-26a5-480d-85e8-531e3c30c139 req-12301b36-b966-4df1-9bfd-4e232c26744b service nova] Lock "04fc458a-a928-43ef-8fd0-bfc49989d2b1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1603.111168] env[68443]: DEBUG nova.compute.manager [req-bcea58a2-26a5-480d-85e8-531e3c30c139 req-12301b36-b966-4df1-9bfd-4e232c26744b service nova] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] No waiting events found dispatching network-vif-plugged-b6261bb4-f4b9-4c9c-8e77-a4a9a4ba312b {{(pid=68443) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1603.111349] env[68443]: WARNING nova.compute.manager [req-bcea58a2-26a5-480d-85e8-531e3c30c139 req-12301b36-b966-4df1-9bfd-4e232c26744b service nova] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Received unexpected event network-vif-plugged-b6261bb4-f4b9-4c9c-8e77-a4a9a4ba312b for instance with vm_state building and task_state spawning. [ 1603.111535] env[68443]: DEBUG nova.compute.manager [req-bcea58a2-26a5-480d-85e8-531e3c30c139 req-12301b36-b966-4df1-9bfd-4e232c26744b service nova] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Received event network-changed-b6261bb4-f4b9-4c9c-8e77-a4a9a4ba312b {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1603.111726] env[68443]: DEBUG nova.compute.manager [req-bcea58a2-26a5-480d-85e8-531e3c30c139 req-12301b36-b966-4df1-9bfd-4e232c26744b service nova] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Refreshing instance network info cache due to event network-changed-b6261bb4-f4b9-4c9c-8e77-a4a9a4ba312b. 
{{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1603.111923] env[68443]: DEBUG oslo_concurrency.lockutils [req-bcea58a2-26a5-480d-85e8-531e3c30c139 req-12301b36-b966-4df1-9bfd-4e232c26744b service nova] Acquiring lock "refresh_cache-04fc458a-a928-43ef-8fd0-bfc49989d2b1" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1603.112096] env[68443]: DEBUG oslo_concurrency.lockutils [req-bcea58a2-26a5-480d-85e8-531e3c30c139 req-12301b36-b966-4df1-9bfd-4e232c26744b service nova] Acquired lock "refresh_cache-04fc458a-a928-43ef-8fd0-bfc49989d2b1" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1603.112320] env[68443]: DEBUG nova.network.neutron [req-bcea58a2-26a5-480d-85e8-531e3c30c139 req-12301b36-b966-4df1-9bfd-4e232c26744b service nova] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Refreshing network info cache for port b6261bb4-f4b9-4c9c-8e77-a4a9a4ba312b {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1603.365657] env[68443]: DEBUG nova.network.neutron [req-bcea58a2-26a5-480d-85e8-531e3c30c139 req-12301b36-b966-4df1-9bfd-4e232c26744b service nova] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Updated VIF entry in instance network info cache for port b6261bb4-f4b9-4c9c-8e77-a4a9a4ba312b. {{(pid=68443) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1603.366201] env[68443]: DEBUG nova.network.neutron [req-bcea58a2-26a5-480d-85e8-531e3c30c139 req-12301b36-b966-4df1-9bfd-4e232c26744b service nova] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Updating instance_info_cache with network_info: [{"id": "b6261bb4-f4b9-4c9c-8e77-a4a9a4ba312b", "address": "fa:16:3e:e3:f9:b9", "network": {"id": "ffd7317a-996b-48eb-8ed3-08829bc8b488", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1411254965-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b9d22d78a3f8410c858ba3f85fb453c3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f4399275-8e92-4448-be9e-d4984e93e89c", "external-id": "nsx-vlan-transportzone-192", "segmentation_id": 192, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb6261bb4-f4", "ovs_interfaceid": "b6261bb4-f4b9-4c9c-8e77-a4a9a4ba312b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1603.376653] env[68443]: DEBUG oslo_concurrency.lockutils [req-bcea58a2-26a5-480d-85e8-531e3c30c139 req-12301b36-b966-4df1-9bfd-4e232c26744b service nova] Releasing lock "refresh_cache-04fc458a-a928-43ef-8fd0-bfc49989d2b1" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1603.585980] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3374026, 'name': CreateVM_Task, 'duration_secs': 0.302789} completed successfully. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1603.586154] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Created VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1603.586860] env[68443]: DEBUG oslo_concurrency.lockutils [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1603.587074] env[68443]: DEBUG oslo_concurrency.lockutils [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1603.587436] env[68443]: DEBUG oslo_concurrency.lockutils [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1603.587738] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-90a8708c-ebe6-4e05-ba33-80439dbe8313 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.592080] env[68443]: DEBUG oslo_vmware.api [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Waiting for the task: (returnval){ [ 1603.592080] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]52563588-d6e3-3036-4f13-a041e9ea0dfc" [ 1603.592080] env[68443]: _type = "Task" [ 1603.592080] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1603.599255] env[68443]: DEBUG oslo_vmware.api [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]52563588-d6e3-3036-4f13-a041e9ea0dfc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1604.103680] env[68443]: DEBUG oslo_concurrency.lockutils [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1604.103990] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Processing image a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1604.104249] env[68443]: DEBUG oslo_concurrency.lockutils [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1605.022510] env[68443]: DEBUG oslo_concurrency.lockutils [None req-7df12fa1-9508-4c22-a1a6-5a5471d4c643 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Acquiring lock "04fc458a-a928-43ef-8fd0-bfc49989d2b1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1608.160272] env[68443]: DEBUG oslo_concurrency.lockutils [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Acquiring lock "062710e8-2ccb-4926-97ce-bf6a9fa4d10c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1608.160556] env[68443]: DEBUG oslo_concurrency.lockutils [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Lock "062710e8-2ccb-4926-97ce-bf6a9fa4d10c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1608.340818] env[68443]: DEBUG oslo_concurrency.lockutils [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Acquiring lock "963d7f65-a761-4ce1-b6d1-fc987c3111c0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1608.341069] env[68443]: DEBUG oslo_concurrency.lockutils [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Lock "963d7f65-a761-4ce1-b6d1-fc987c3111c0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: 
waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1628.824790] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1628.825213] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1630.826383] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1631.824604] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1631.824869] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Starting heal instance info cache {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1631.824996] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Rebuilding the list of instances to heal {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1631.847532] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1631.847827] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1631.847869] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1631.848020] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1631.848159] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: a4708485-db53-416e-94be-f9a017eb28c4] Skipping network cache update for instance because it is Building. 
{{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1631.848276] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1631.848397] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1631.848514] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1631.848630] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1631.848745] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1631.848863] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Didn't find any instances for network info cache update. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1631.849402] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1633.824781] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1633.825106] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68443) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1633.825215] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1633.837111] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1633.837349] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1633.837518] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1633.837674] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68443) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1633.839152] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9876f382-7553-4fc9-a4c5-73ba0cdeae80 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.848590] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e040e3cd-0b18-451d-aa97-a3ef349c9c3b {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.862341] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dddce01-0f01-4db6-8ccc-cf63e9d8c64c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.868724] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-046ef8f4-ff8b-4c4f-bf8d-abf05f319709 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.898756] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180982MB free_disk=105GB free_vcpus=48 pci_devices=None {{(pid=68443) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1633.898942] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1633.899127] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1633.973644] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance bcdc4f46-810d-4ed7-84f1-2db2c318f920 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1633.973807] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 91fd9c10-db96-4366-9548-13b36f94db6b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1633.973935] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 75e3d9b0-4317-4e6e-9f2b-d32134f7223f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1633.974076] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1633.974201] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance a4708485-db53-416e-94be-f9a017eb28c4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1633.974326] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 18bae6a1-3bd0-4749-8795-5b8ccd18193f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1633.974445] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 99b16cd5-beb0-4f71-8011-411b84ddf497 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1633.974559] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1633.974673] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 7366efe5-c640-4689-97a1-fba0ac431b12 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1633.974785] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 04fc458a-a928-43ef-8fd0-bfc49989d2b1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1633.986056] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 2985403d-348f-473d-ad1f-75fb67d3be12 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1633.997712] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 91194294-6f8a-4067-a7ed-610c9da3aec8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1634.009088] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 409e47f5-dea2-43a4-9ab6-475dc09fafb4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1634.019224] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 0a9bb99d-8f94-4f26-990e-a57aac09c328 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1634.028629] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance b8c2916e-3b70-42c9-9f85-ee8582c636b8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1634.038900] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 062710e8-2ccb-4926-97ce-bf6a9fa4d10c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1634.048012] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 963d7f65-a761-4ce1-b6d1-fc987c3111c0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1634.048241] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1634.048389] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1634.238346] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef46837b-47c3-4371-ba95-e320832c65d4 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.246194] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38bd36bf-414b-4395-8a1f-949cf4d5d168 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.276054] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c30c6a2e-3a82-4257-aebc-6cd1cefdc1cd {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.282790] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4bc6bbb-c716-4f82-aa62-b856497651d6 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.295295] env[68443]: DEBUG nova.compute.provider_tree [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1634.303143] env[68443]: DEBUG nova.scheduler.client.report [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1634.316288] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68443) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1634.316463] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.417s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1636.317663] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1636.820787] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1637.842752] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1647.451064] env[68443]: WARNING oslo_vmware.rw_handles [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1647.451064] env[68443]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1647.451064] env[68443]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1647.451064] env[68443]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1647.451064] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1647.451064] env[68443]: ERROR oslo_vmware.rw_handles response.begin() [ 1647.451064] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1647.451064] env[68443]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1647.451064] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1647.451064] env[68443]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1647.451064] env[68443]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1647.451064] env[68443]: ERROR oslo_vmware.rw_handles [ 1647.451064] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 
tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Downloaded image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to vmware_temp/98a9daf9-4632-4e66-9585-453ca9d9721a/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1647.452973] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Caching image {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1647.453250] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Copying Virtual Disk [datastore1] vmware_temp/98a9daf9-4632-4e66-9585-453ca9d9721a/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk to [datastore1] vmware_temp/98a9daf9-4632-4e66-9585-453ca9d9721a/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk {{(pid=68443) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1647.453580] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d73d7bcf-6775-4c8b-adf5-cd052f62f511 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.462024] env[68443]: DEBUG oslo_vmware.api [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Waiting for the task: (returnval){ [ 1647.462024] env[68443]: value = "task-3374027" [ 1647.462024] env[68443]: _type = "Task" [ 1647.462024] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1647.469894] env[68443]: DEBUG oslo_vmware.api [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Task: {'id': task-3374027, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.972663] env[68443]: DEBUG oslo_vmware.exceptions [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Fault InvalidArgument not matched. 
{{(pid=68443) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1647.972812] env[68443]: DEBUG oslo_concurrency.lockutils [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1647.973278] env[68443]: ERROR nova.compute.manager [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1647.973278] env[68443]: Faults: ['InvalidArgument'] [ 1647.973278] env[68443]: ERROR nova.compute.manager [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Traceback (most recent call last): [ 1647.973278] env[68443]: ERROR nova.compute.manager [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1647.973278] env[68443]: ERROR nova.compute.manager [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] yield resources [ 1647.973278] env[68443]: ERROR nova.compute.manager [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1647.973278] env[68443]: ERROR nova.compute.manager [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] self.driver.spawn(context, instance, image_meta, [ 1647.973278] env[68443]: ERROR nova.compute.manager [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1647.973278] env[68443]: ERROR nova.compute.manager [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1647.973278] env[68443]: ERROR nova.compute.manager [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1647.973278] env[68443]: ERROR nova.compute.manager [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] self._fetch_image_if_missing(context, vi) [ 1647.973278] env[68443]: ERROR nova.compute.manager [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1647.973685] env[68443]: ERROR nova.compute.manager [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] image_cache(vi, tmp_image_ds_loc) [ 1647.973685] env[68443]: ERROR nova.compute.manager [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1647.973685] env[68443]: ERROR nova.compute.manager [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] vm_util.copy_virtual_disk( [ 1647.973685] env[68443]: ERROR nova.compute.manager [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1647.973685] env[68443]: ERROR nova.compute.manager [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] session._wait_for_task(vmdk_copy_task) [ 1647.973685] env[68443]: ERROR nova.compute.manager [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1647.973685] env[68443]: ERROR nova.compute.manager [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] return self.wait_for_task(task_ref) [ 1647.973685] env[68443]: ERROR nova.compute.manager [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1647.973685] env[68443]: ERROR nova.compute.manager [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] return evt.wait() [ 1647.973685] env[68443]: ERROR nova.compute.manager [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1647.973685] env[68443]: ERROR nova.compute.manager [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] result = hub.switch() [ 1647.973685] env[68443]: ERROR nova.compute.manager [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1647.973685] env[68443]: ERROR nova.compute.manager [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] return self.greenlet.switch() [ 1647.974054] env[68443]: ERROR nova.compute.manager [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1647.974054] env[68443]: ERROR nova.compute.manager [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] self.f(*self.args, **self.kw) [ 1647.974054] env[68443]: ERROR nova.compute.manager [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1647.974054] env[68443]: ERROR nova.compute.manager [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] raise exceptions.translate_fault(task_info.error) [ 1647.974054] env[68443]: ERROR nova.compute.manager [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1647.974054] env[68443]: ERROR nova.compute.manager [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Faults: ['InvalidArgument'] [ 1647.974054] env[68443]: ERROR nova.compute.manager [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] [ 1647.974054] env[68443]: INFO nova.compute.manager [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Terminating instance [ 1647.975243] env[68443]: DEBUG oslo_concurrency.lockutils [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1647.975504] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1647.975754] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a8201a03-95cd-4e2f-82da-2c293a0ffe1c {{(pid=68443) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.977930] env[68443]: DEBUG nova.compute.manager [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1647.978140] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1647.978922] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df804d1f-0458-4e8c-b3a7-a7ee4887056b {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.985871] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Unregistering the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1647.986094] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-274fc35e-d725-40c6-836c-441004a38e13 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.988273] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1647.988451] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68443) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1647.989422] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e373b70f-5c33-4673-8c06-a6a388b88c47 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.994296] env[68443]: DEBUG oslo_vmware.api [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Waiting for the task: (returnval){ [ 1647.994296] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]52cfca37-d6ab-b035-0d6a-b925d8359a50" [ 1647.994296] env[68443]: _type = "Task" [ 1647.994296] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1648.006440] env[68443]: DEBUG oslo_vmware.api [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]52cfca37-d6ab-b035-0d6a-b925d8359a50, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1648.059188] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Unregistered the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1648.059410] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Deleting contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1648.059584] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Deleting the datastore file [datastore1] bcdc4f46-810d-4ed7-84f1-2db2c318f920 {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1648.059860] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d7461c39-3017-4ca1-840e-a7cc4e9ea524 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.065987] env[68443]: DEBUG oslo_vmware.api [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Waiting for the task: (returnval){ [ 1648.065987] env[68443]: value = "task-3374029" [ 1648.065987] env[68443]: _type = "Task" [ 1648.065987] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1648.073591] env[68443]: DEBUG oslo_vmware.api [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Task: {'id': task-3374029, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1648.505089] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Preparing fetch location {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1648.505433] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Creating directory with path [datastore1] vmware_temp/37747e88-143c-4fab-832d-24bc6b431ab6/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1648.505615] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-35afab63-ee15-49a3-aa98-233ac215b8e4 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.516341] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Created directory with path [datastore1] vmware_temp/37747e88-143c-4fab-832d-24bc6b431ab6/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1648.516541] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Fetch image to [datastore1] vmware_temp/37747e88-143c-4fab-832d-24bc6b431ab6/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1648.516714] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to [datastore1] vmware_temp/37747e88-143c-4fab-832d-24bc6b431ab6/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1648.517460] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eda0d416-cfe5-48d8-b878-91b8c3c75ebe {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.523675] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce5de2ef-d396-47dd-aecf-3e5ab57c37a6 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.533426] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3dc8c15-be84-4651-b273-da087c3a2777 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.562921] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87cecc06-6ab6-486f-9b55-1cb3f8e98462 {{(pid=68443) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.570797] env[68443]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-de0b5d86-47fa-4a05-ac4a-9649237b079e {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.574806] env[68443]: DEBUG oslo_vmware.api [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Task: {'id': task-3374029, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077002} completed successfully. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1648.575307] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Deleted the datastore file {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1648.575491] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Deleted contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1648.575668] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1648.575841] env[68443]: INFO nova.compute.manager [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Took 0.60 seconds to destroy the instance on the hypervisor. 
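The preceding entries show the task pattern used throughout this log: a vCenter call (CopyVirtualDisk_Task, DeleteDatastoreFile_Task, SearchDatastore_Task) returns a task handle, and the caller blocks in wait_for_task/_poll_task, logging progress ("progress is 0%.") until the task either completes ("completed successfully") or raises a fault, as with the InvalidArgument fault earlier. The sketch below only illustrates that poll-until-done loop under invented names (poll_task, TaskFailed); it is not the oslo.vmware implementation.

    import time

    class TaskFailed(Exception):
        """Polled task reported an error."""

    def poll_task(get_state, interval=0.5, timeout=60.0):
        # get_state() is assumed to return ("running", progress),
        # ("success", result) or ("error", fault); poll until the task
        # leaves the "running" state or the timeout expires.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            state, payload = get_state()
            if state == "success":
                return payload
            if state == "error":
                raise TaskFailed(payload)
            time.sleep(interval)   # still "running", e.g. "progress is 0%."
        raise TimeoutError("task did not complete in time")

    if __name__ == "__main__":
        # Fake task that finishes on the third poll.
        states = iter([("running", 0), ("running", 50), ("success", "done")])
        print(poll_task(lambda: next(states), interval=0.01))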
[ 1648.577971] env[68443]: DEBUG nova.compute.claims [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Aborting claim: {{(pid=68443) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1648.578153] env[68443]: DEBUG oslo_concurrency.lockutils [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1648.578388] env[68443]: DEBUG oslo_concurrency.lockutils [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1648.593439] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1648.644848] env[68443]: DEBUG oslo_vmware.rw_handles [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/37747e88-143c-4fab-832d-24bc6b431ab6/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68443) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1648.705307] env[68443]: DEBUG oslo_vmware.rw_handles [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Completed reading data from the image iterator. {{(pid=68443) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1648.705502] env[68443]: DEBUG oslo_vmware.rw_handles [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/37747e88-143c-4fab-832d-24bc6b431ab6/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68443) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1648.852823] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9678da03-4eb1-4c9f-a89b-52c1529f6ccf {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.860357] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39b1cb1a-b51c-48ea-ba7e-e2a53b550e8e {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.890401] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac60fb7a-534a-46a3-8716-55d9ff8b3898 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.896990] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-531fb5ca-8443-4239-b3c8-9244684371d8 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.909561] env[68443]: DEBUG nova.compute.provider_tree [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1648.918058] env[68443]: DEBUG nova.scheduler.client.report [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1648.930904] env[68443]: DEBUG oslo_concurrency.lockutils [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.352s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1648.931444] env[68443]: ERROR nova.compute.manager [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1648.931444] env[68443]: Faults: ['InvalidArgument'] [ 1648.931444] env[68443]: ERROR nova.compute.manager [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Traceback (most recent call last): [ 1648.931444] env[68443]: ERROR nova.compute.manager [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1648.931444] 
env[68443]: ERROR nova.compute.manager [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] self.driver.spawn(context, instance, image_meta, [ 1648.931444] env[68443]: ERROR nova.compute.manager [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1648.931444] env[68443]: ERROR nova.compute.manager [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1648.931444] env[68443]: ERROR nova.compute.manager [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1648.931444] env[68443]: ERROR nova.compute.manager [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] self._fetch_image_if_missing(context, vi) [ 1648.931444] env[68443]: ERROR nova.compute.manager [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1648.931444] env[68443]: ERROR nova.compute.manager [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] image_cache(vi, tmp_image_ds_loc) [ 1648.931444] env[68443]: ERROR nova.compute.manager [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1648.931801] env[68443]: ERROR nova.compute.manager [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] vm_util.copy_virtual_disk( [ 1648.931801] env[68443]: ERROR nova.compute.manager [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1648.931801] env[68443]: ERROR nova.compute.manager [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] session._wait_for_task(vmdk_copy_task) [ 1648.931801] env[68443]: ERROR nova.compute.manager [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1648.931801] env[68443]: ERROR nova.compute.manager [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] return self.wait_for_task(task_ref) [ 1648.931801] env[68443]: ERROR nova.compute.manager [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1648.931801] env[68443]: ERROR nova.compute.manager [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] return evt.wait() [ 1648.931801] env[68443]: ERROR nova.compute.manager [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1648.931801] env[68443]: ERROR nova.compute.manager [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] result = hub.switch() [ 1648.931801] env[68443]: ERROR nova.compute.manager [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1648.931801] env[68443]: ERROR nova.compute.manager [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] return self.greenlet.switch() [ 1648.931801] env[68443]: ERROR nova.compute.manager [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1648.931801] env[68443]: ERROR nova.compute.manager [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] self.f(*self.args, **self.kw) [ 1648.932157] env[68443]: ERROR nova.compute.manager [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1648.932157] env[68443]: ERROR nova.compute.manager [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] raise exceptions.translate_fault(task_info.error) [ 1648.932157] env[68443]: ERROR nova.compute.manager [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1648.932157] env[68443]: ERROR nova.compute.manager [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Faults: ['InvalidArgument'] [ 1648.932157] env[68443]: ERROR nova.compute.manager [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] [ 1648.932157] env[68443]: DEBUG nova.compute.utils [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] VimFaultException {{(pid=68443) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1648.933452] env[68443]: DEBUG nova.compute.manager [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Build of instance bcdc4f46-810d-4ed7-84f1-2db2c318f920 was re-scheduled: A specified parameter was not correct: fileType [ 1648.933452] env[68443]: Faults: ['InvalidArgument'] {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1648.933814] env[68443]: DEBUG nova.compute.manager [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Unplugging VIFs for instance {{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1648.933985] env[68443]: DEBUG nova.compute.manager [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1648.934169] env[68443]: DEBUG nova.compute.manager [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1648.934331] env[68443]: DEBUG nova.network.neutron [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1649.228316] env[68443]: DEBUG nova.network.neutron [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1649.241693] env[68443]: INFO nova.compute.manager [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Took 0.31 seconds to deallocate network for instance. [ 1649.347625] env[68443]: INFO nova.scheduler.client.report [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Deleted allocations for instance bcdc4f46-810d-4ed7-84f1-2db2c318f920 [ 1649.374026] env[68443]: DEBUG oslo_concurrency.lockutils [None req-3066d8b9-0cf5-413b-a33f-f5ae9703a569 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Lock "bcdc4f46-810d-4ed7-84f1-2db2c318f920" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 634.932s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1649.374026] env[68443]: DEBUG oslo_concurrency.lockutils [None req-32fc225b-4ecf-4e97-8c6e-0d0b98c8ea59 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Lock "bcdc4f46-810d-4ed7-84f1-2db2c318f920" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 438.650s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1649.374026] env[68443]: DEBUG oslo_concurrency.lockutils [None req-32fc225b-4ecf-4e97-8c6e-0d0b98c8ea59 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Acquiring lock "bcdc4f46-810d-4ed7-84f1-2db2c318f920-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1649.374840] env[68443]: DEBUG oslo_concurrency.lockutils [None req-32fc225b-4ecf-4e97-8c6e-0d0b98c8ea59 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Lock "bcdc4f46-810d-4ed7-84f1-2db2c318f920-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1649.374840] env[68443]: DEBUG oslo_concurrency.lockutils [None req-32fc225b-4ecf-4e97-8c6e-0d0b98c8ea59 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Lock "bcdc4f46-810d-4ed7-84f1-2db2c318f920-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1649.380706] env[68443]: INFO nova.compute.manager [None req-32fc225b-4ecf-4e97-8c6e-0d0b98c8ea59 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Terminating instance [ 1649.383356] env[68443]: DEBUG nova.compute.manager [None req-32fc225b-4ecf-4e97-8c6e-0d0b98c8ea59 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1649.383471] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-32fc225b-4ecf-4e97-8c6e-0d0b98c8ea59 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1649.383744] env[68443]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-20b46ad7-55cf-4fae-91b6-fe362b7fc4be {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.389018] env[68443]: DEBUG nova.compute.manager [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1649.397756] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71bfafe3-8523-4cf8-b739-2ffa1e4c100d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.427097] env[68443]: WARNING nova.virt.vmwareapi.vmops [None req-32fc225b-4ecf-4e97-8c6e-0d0b98c8ea59 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance bcdc4f46-810d-4ed7-84f1-2db2c318f920 could not be found. 
[ 1649.427337] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-32fc225b-4ecf-4e97-8c6e-0d0b98c8ea59 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1649.427518] env[68443]: INFO nova.compute.manager [None req-32fc225b-4ecf-4e97-8c6e-0d0b98c8ea59 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1649.427781] env[68443]: DEBUG oslo.service.loopingcall [None req-32fc225b-4ecf-4e97-8c6e-0d0b98c8ea59 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1649.430333] env[68443]: DEBUG nova.compute.manager [-] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1649.430462] env[68443]: DEBUG nova.network.neutron [-] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1649.446185] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1649.446439] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1649.447946] env[68443]: INFO nova.compute.claims [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1649.460019] env[68443]: DEBUG nova.network.neutron [-] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1649.469950] env[68443]: INFO nova.compute.manager [-] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] Took 0.04 seconds to deallocate network for instance. 
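In the terminate sequence above, the backend VM was already unregistered and its datastore files deleted during the earlier cleanup under req-3066d8b9-0cf5-413b-a33f-f5ae9703a569, so the later terminate under req-32fc225b-4ecf-4e97-8c6e-0d0b98c8ea59 hits InstanceNotFound, logs "Instance does not exist on backend", and still reports the instance destroyed before moving on to network deallocation. The sketch below illustrates that idempotent-teardown pattern under hypothetical names (destroy_backend_vm, deallocate_network, BackendNotFound); it is not Nova's vmops code.

    import logging

    LOG = logging.getLogger(__name__)

    class BackendNotFound(Exception):
        pass

    def destroy_instance(instance_id, destroy_backend_vm, deallocate_network):
        try:
            destroy_backend_vm(instance_id)
        except BackendNotFound:
            # Counterpart of the "Instance does not exist on backend" warning:
            # nothing is left on the hypervisor, so treat it as already destroyed
            # and let the rest of the cleanup proceed.
            LOG.warning("Instance %s not found on backend; assuming destroyed",
                        instance_id)
        deallocate_network(instance_id)
        LOG.info("Instance %s destroyed", instance_id)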
[ 1649.572742] env[68443]: DEBUG oslo_concurrency.lockutils [None req-32fc225b-4ecf-4e97-8c6e-0d0b98c8ea59 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Lock "bcdc4f46-810d-4ed7-84f1-2db2c318f920" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.199s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1649.573635] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "bcdc4f46-810d-4ed7-84f1-2db2c318f920" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 373.480s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1649.573833] env[68443]: INFO nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: bcdc4f46-810d-4ed7-84f1-2db2c318f920] During sync_power_state the instance has a pending task (deleting). Skip. [ 1649.574020] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "bcdc4f46-810d-4ed7-84f1-2db2c318f920" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1649.698439] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a50a5fc-38f1-4412-8ebe-f27691cfd39f {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.705699] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd2306ec-51ac-446b-8961-c63502ac6d37 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.734598] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8b574e9-31fe-4c93-b1dc-e5e7b42c2425 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.741749] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5a62c5a-e91f-49b9-ae17-12dd217aa51e {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.754360] env[68443]: DEBUG nova.compute.provider_tree [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1649.763726] env[68443]: DEBUG nova.scheduler.client.report [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 
105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1649.777686] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.331s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1649.778144] env[68443]: DEBUG nova.compute.manager [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Start building networks asynchronously for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1649.811289] env[68443]: DEBUG nova.compute.utils [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Using /dev/sd instead of None {{(pid=68443) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1649.812656] env[68443]: DEBUG nova.compute.manager [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Allocating IP information in the background. {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1649.812833] env[68443]: DEBUG nova.network.neutron [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] allocate_for_instance() {{(pid=68443) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1649.820945] env[68443]: DEBUG nova.compute.manager [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Start building block device mappings for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1649.874985] env[68443]: DEBUG nova.policy [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5394eee9936641f986136eee619d6c2b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3d4692d4df3948b98eae443eebb5239b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68443) authorize /opt/stack/nova/nova/policy.py:203}} [ 1649.882122] env[68443]: DEBUG nova.compute.manager [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Start spawning the instance on the hypervisor. 
{{(pid=68443) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1649.905737] env[68443]: DEBUG nova.virt.hardware [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-08T18:48:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-08T18:48:12Z,direct_url=,disk_format='vmdk',id=a80a8b97-4d56-4702-9b02-9d115fcd6710,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f4f2a1e220914ec3b281775c224df247',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-08T18:48:13Z,virtual_size=,visibility=), allow threads: False {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1649.905987] env[68443]: DEBUG nova.virt.hardware [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Flavor limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1649.906162] env[68443]: DEBUG nova.virt.hardware [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Image limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1649.906344] env[68443]: DEBUG nova.virt.hardware [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Flavor pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1649.906487] env[68443]: DEBUG nova.virt.hardware [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Image pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1649.906633] env[68443]: DEBUG nova.virt.hardware [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1649.906831] env[68443]: DEBUG nova.virt.hardware [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1649.906986] env[68443]: DEBUG nova.virt.hardware [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1649.907162] env[68443]: DEBUG nova.virt.hardware [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 
tempest-ServersTestJSON-140288033-project-member] Got 1 possible topologies {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1649.907321] env[68443]: DEBUG nova.virt.hardware [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1649.907522] env[68443]: DEBUG nova.virt.hardware [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1649.908372] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce2df2f9-cae3-4787-907e-a474e0c04654 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.916124] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7227ca1a-afe1-4972-a5b8-de146be57412 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.162688] env[68443]: DEBUG nova.network.neutron [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Successfully created port: f54de8ef-4145-4afe-b464-89e3401c5e20 {{(pid=68443) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1650.753411] env[68443]: DEBUG nova.network.neutron [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Successfully updated port: f54de8ef-4145-4afe-b464-89e3401c5e20 {{(pid=68443) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1650.765648] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Acquiring lock "refresh_cache-2985403d-348f-473d-ad1f-75fb67d3be12" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1650.766326] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Acquired lock "refresh_cache-2985403d-348f-473d-ad1f-75fb67d3be12" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1650.766326] env[68443]: DEBUG nova.network.neutron [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Building network info cache for instance {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1650.812764] env[68443]: DEBUG nova.network.neutron [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Instance cache missing network info. 
{{(pid=68443) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1650.981513] env[68443]: DEBUG nova.network.neutron [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Updating instance_info_cache with network_info: [{"id": "f54de8ef-4145-4afe-b464-89e3401c5e20", "address": "fa:16:3e:fa:02:db", "network": {"id": "986011ca-8616-45ae-ad5c-074dc2c4637d", "bridge": "br-int", "label": "tempest-ServersTestJSON-393504262-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d4692d4df3948b98eae443eebb5239b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "095fbf26-7367-4f9e-87c5-2965b64b0b0f", "external-id": "nsx-vlan-transportzone-777", "segmentation_id": 777, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf54de8ef-41", "ovs_interfaceid": "f54de8ef-4145-4afe-b464-89e3401c5e20", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1650.995625] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Releasing lock "refresh_cache-2985403d-348f-473d-ad1f-75fb67d3be12" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1650.995991] env[68443]: DEBUG nova.compute.manager [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Instance network_info: |[{"id": "f54de8ef-4145-4afe-b464-89e3401c5e20", "address": "fa:16:3e:fa:02:db", "network": {"id": "986011ca-8616-45ae-ad5c-074dc2c4637d", "bridge": "br-int", "label": "tempest-ServersTestJSON-393504262-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d4692d4df3948b98eae443eebb5239b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "095fbf26-7367-4f9e-87c5-2965b64b0b0f", "external-id": "nsx-vlan-transportzone-777", "segmentation_id": 777, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf54de8ef-41", "ovs_interfaceid": "f54de8ef-4145-4afe-b464-89e3401c5e20", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1650.996720] env[68443]: DEBUG 
nova.virt.vmwareapi.vmops [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fa:02:db', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '095fbf26-7367-4f9e-87c5-2965b64b0b0f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f54de8ef-4145-4afe-b464-89e3401c5e20', 'vif_model': 'vmxnet3'}] {{(pid=68443) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1651.004908] env[68443]: DEBUG oslo.service.loopingcall [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1651.005386] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Creating VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1651.005619] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-395a295d-b2d7-4fac-8f10-56429b63bda2 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.026567] env[68443]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1651.026567] env[68443]: value = "task-3374030" [ 1651.026567] env[68443]: _type = "Task" [ 1651.026567] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1651.035118] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3374030, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1651.366757] env[68443]: DEBUG nova.compute.manager [req-c1a37d69-4726-4bff-b4c8-2f04ea602b06 req-45bf95b8-d3fa-4028-8620-4f9adda9c6d5 service nova] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Received event network-vif-plugged-f54de8ef-4145-4afe-b464-89e3401c5e20 {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1651.366980] env[68443]: DEBUG oslo_concurrency.lockutils [req-c1a37d69-4726-4bff-b4c8-2f04ea602b06 req-45bf95b8-d3fa-4028-8620-4f9adda9c6d5 service nova] Acquiring lock "2985403d-348f-473d-ad1f-75fb67d3be12-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1651.367221] env[68443]: DEBUG oslo_concurrency.lockutils [req-c1a37d69-4726-4bff-b4c8-2f04ea602b06 req-45bf95b8-d3fa-4028-8620-4f9adda9c6d5 service nova] Lock "2985403d-348f-473d-ad1f-75fb67d3be12-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1651.367396] env[68443]: DEBUG oslo_concurrency.lockutils [req-c1a37d69-4726-4bff-b4c8-2f04ea602b06 req-45bf95b8-d3fa-4028-8620-4f9adda9c6d5 service nova] Lock "2985403d-348f-473d-ad1f-75fb67d3be12-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1651.367788] env[68443]: DEBUG nova.compute.manager [req-c1a37d69-4726-4bff-b4c8-2f04ea602b06 req-45bf95b8-d3fa-4028-8620-4f9adda9c6d5 service nova] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] No waiting events found dispatching network-vif-plugged-f54de8ef-4145-4afe-b464-89e3401c5e20 {{(pid=68443) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1651.368183] env[68443]: WARNING nova.compute.manager [req-c1a37d69-4726-4bff-b4c8-2f04ea602b06 req-45bf95b8-d3fa-4028-8620-4f9adda9c6d5 service nova] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Received unexpected event network-vif-plugged-f54de8ef-4145-4afe-b464-89e3401c5e20 for instance with vm_state building and task_state spawning. [ 1651.368411] env[68443]: DEBUG nova.compute.manager [req-c1a37d69-4726-4bff-b4c8-2f04ea602b06 req-45bf95b8-d3fa-4028-8620-4f9adda9c6d5 service nova] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Received event network-changed-f54de8ef-4145-4afe-b464-89e3401c5e20 {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1651.368623] env[68443]: DEBUG nova.compute.manager [req-c1a37d69-4726-4bff-b4c8-2f04ea602b06 req-45bf95b8-d3fa-4028-8620-4f9adda9c6d5 service nova] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Refreshing instance network info cache due to event network-changed-f54de8ef-4145-4afe-b464-89e3401c5e20. 
{{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1651.368860] env[68443]: DEBUG oslo_concurrency.lockutils [req-c1a37d69-4726-4bff-b4c8-2f04ea602b06 req-45bf95b8-d3fa-4028-8620-4f9adda9c6d5 service nova] Acquiring lock "refresh_cache-2985403d-348f-473d-ad1f-75fb67d3be12" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1651.369053] env[68443]: DEBUG oslo_concurrency.lockutils [req-c1a37d69-4726-4bff-b4c8-2f04ea602b06 req-45bf95b8-d3fa-4028-8620-4f9adda9c6d5 service nova] Acquired lock "refresh_cache-2985403d-348f-473d-ad1f-75fb67d3be12" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1651.369267] env[68443]: DEBUG nova.network.neutron [req-c1a37d69-4726-4bff-b4c8-2f04ea602b06 req-45bf95b8-d3fa-4028-8620-4f9adda9c6d5 service nova] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Refreshing network info cache for port f54de8ef-4145-4afe-b464-89e3401c5e20 {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1651.536906] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3374030, 'name': CreateVM_Task, 'duration_secs': 0.372217} completed successfully. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1651.537311] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Created VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1651.538085] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1651.538259] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1651.538613] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1651.538873] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5cc6d795-89f5-4efd-871e-e1698941e641 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.543461] env[68443]: DEBUG oslo_vmware.api [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Waiting for the task: (returnval){ [ 1651.543461] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]52dded3a-81e0-398a-51d4-d37f9342d5df" [ 1651.543461] env[68443]: _type = "Task" [ 1651.543461] env[68443]: } to complete. 
{{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1651.551077] env[68443]: DEBUG oslo_vmware.api [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]52dded3a-81e0-398a-51d4-d37f9342d5df, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1651.802359] env[68443]: DEBUG nova.network.neutron [req-c1a37d69-4726-4bff-b4c8-2f04ea602b06 req-45bf95b8-d3fa-4028-8620-4f9adda9c6d5 service nova] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Updated VIF entry in instance network info cache for port f54de8ef-4145-4afe-b464-89e3401c5e20. {{(pid=68443) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1651.802730] env[68443]: DEBUG nova.network.neutron [req-c1a37d69-4726-4bff-b4c8-2f04ea602b06 req-45bf95b8-d3fa-4028-8620-4f9adda9c6d5 service nova] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Updating instance_info_cache with network_info: [{"id": "f54de8ef-4145-4afe-b464-89e3401c5e20", "address": "fa:16:3e:fa:02:db", "network": {"id": "986011ca-8616-45ae-ad5c-074dc2c4637d", "bridge": "br-int", "label": "tempest-ServersTestJSON-393504262-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d4692d4df3948b98eae443eebb5239b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "095fbf26-7367-4f9e-87c5-2965b64b0b0f", "external-id": "nsx-vlan-transportzone-777", "segmentation_id": 777, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf54de8ef-41", "ovs_interfaceid": "f54de8ef-4145-4afe-b464-89e3401c5e20", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1651.812064] env[68443]: DEBUG oslo_concurrency.lockutils [req-c1a37d69-4726-4bff-b4c8-2f04ea602b06 req-45bf95b8-d3fa-4028-8620-4f9adda9c6d5 service nova] Releasing lock "refresh_cache-2985403d-348f-473d-ad1f-75fb67d3be12" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1652.054140] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1652.054355] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Processing image a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1652.054566] env[68443]: 
DEBUG oslo_concurrency.lockutils [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1655.201201] env[68443]: DEBUG oslo_concurrency.lockutils [None req-463e2e37-48e0-4094-bbb5-a29bec0c8b95 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Acquiring lock "2985403d-348f-473d-ad1f-75fb67d3be12" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1661.024569] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Acquiring lock "12b39079-051e-4997-9fa1-7e467af04306" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1661.024569] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Lock "12b39079-051e-4997-9fa1-7e467af04306" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1689.825718] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1689.826079] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1692.826025] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1692.826398] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Starting heal instance info cache {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1692.826398] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Rebuilding the list of instances to heal {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1692.848814] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Skipping network cache update for instance because it is Building. 
{{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1692.849150] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1692.849311] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1692.849444] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: a4708485-db53-416e-94be-f9a017eb28c4] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1692.849571] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1692.849693] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1692.849812] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1692.849930] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1692.850068] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1692.850189] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1692.850309] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Didn't find any instances for network info cache update. 
{{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1692.850782] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1692.850955] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1693.824883] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1693.842058] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1693.842058] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1693.842058] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1693.842058] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68443) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1693.842058] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86b7b39d-8e56-485f-808a-f9fa8c79761c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.851500] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16ca14f3-63a3-416d-b26d-dbeb052b8c40 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.865731] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db13ea61-a251-4f97-890f-56277ebc1972 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.872342] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebbbec1c-f0e7-4388-9eba-d5212d829a90 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.902424] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None 
None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181000MB free_disk=105GB free_vcpus=48 pci_devices=None {{(pid=68443) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1693.902826] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1693.903178] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1693.974281] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 91fd9c10-db96-4366-9548-13b36f94db6b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1693.974454] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 75e3d9b0-4317-4e6e-9f2b-d32134f7223f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1693.974584] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1693.974708] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance a4708485-db53-416e-94be-f9a017eb28c4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1693.974829] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 18bae6a1-3bd0-4749-8795-5b8ccd18193f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1693.974949] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 99b16cd5-beb0-4f71-8011-411b84ddf497 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1693.975081] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1693.975202] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 7366efe5-c640-4689-97a1-fba0ac431b12 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1693.975318] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 04fc458a-a928-43ef-8fd0-bfc49989d2b1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1693.975430] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 2985403d-348f-473d-ad1f-75fb67d3be12 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1693.986644] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 0a9bb99d-8f94-4f26-990e-a57aac09c328 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1693.998612] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance b8c2916e-3b70-42c9-9f85-ee8582c636b8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1694.009008] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 062710e8-2ccb-4926-97ce-bf6a9fa4d10c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1694.019150] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 963d7f65-a761-4ce1-b6d1-fc987c3111c0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1694.028638] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 12b39079-051e-4997-9fa1-7e467af04306 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1694.028861] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1694.029017] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1694.201474] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-339e2f68-d41c-43d9-88ce-902d0c615c7c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.209167] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6356e450-b16d-431f-ac0d-30ab34945027 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.238953] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da6f636f-4c23-4546-b655-a7d1f9ad7c69 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.245812] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73b49aea-ea35-4f52-8dfa-adb4b9900568 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.259644] env[68443]: DEBUG nova.compute.provider_tree [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1694.270550] env[68443]: DEBUG nova.scheduler.client.report [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1694.284577] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] 
Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68443) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1694.284577] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.381s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1695.285058] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1695.285058] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68443) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1697.821483] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1697.825016] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1697.872179] env[68443]: WARNING oslo_vmware.rw_handles [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1697.872179] env[68443]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1697.872179] env[68443]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1697.872179] env[68443]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1697.872179] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1697.872179] env[68443]: ERROR oslo_vmware.rw_handles response.begin() [ 1697.872179] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1697.872179] env[68443]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1697.872179] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1697.872179] env[68443]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1697.872179] env[68443]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1697.872179] env[68443]: ERROR oslo_vmware.rw_handles [ 1697.872179] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Downloaded image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to 
vmware_temp/37747e88-143c-4fab-832d-24bc6b431ab6/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1697.874254] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Caching image {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1697.874824] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Copying Virtual Disk [datastore1] vmware_temp/37747e88-143c-4fab-832d-24bc6b431ab6/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk to [datastore1] vmware_temp/37747e88-143c-4fab-832d-24bc6b431ab6/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk {{(pid=68443) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1697.874824] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3a9db18d-6b0d-4d14-8c5c-b130fcc5f1e6 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.884096] env[68443]: DEBUG oslo_vmware.api [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Waiting for the task: (returnval){ [ 1697.884096] env[68443]: value = "task-3374031" [ 1697.884096] env[68443]: _type = "Task" [ 1697.884096] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1697.891861] env[68443]: DEBUG oslo_vmware.api [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Task: {'id': task-3374031, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1698.395248] env[68443]: DEBUG oslo_vmware.exceptions [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Fault InvalidArgument not matched. 
{{(pid=68443) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1698.395608] env[68443]: DEBUG oslo_concurrency.lockutils [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1698.396175] env[68443]: ERROR nova.compute.manager [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1698.396175] env[68443]: Faults: ['InvalidArgument'] [ 1698.396175] env[68443]: ERROR nova.compute.manager [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Traceback (most recent call last): [ 1698.396175] env[68443]: ERROR nova.compute.manager [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1698.396175] env[68443]: ERROR nova.compute.manager [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] yield resources [ 1698.396175] env[68443]: ERROR nova.compute.manager [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1698.396175] env[68443]: ERROR nova.compute.manager [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] self.driver.spawn(context, instance, image_meta, [ 1698.396175] env[68443]: ERROR nova.compute.manager [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1698.396175] env[68443]: ERROR nova.compute.manager [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1698.396175] env[68443]: ERROR nova.compute.manager [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1698.396175] env[68443]: ERROR nova.compute.manager [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] self._fetch_image_if_missing(context, vi) [ 1698.396175] env[68443]: ERROR nova.compute.manager [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1698.396175] env[68443]: ERROR nova.compute.manager [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] image_cache(vi, tmp_image_ds_loc) [ 1698.396576] env[68443]: ERROR nova.compute.manager [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1698.396576] env[68443]: ERROR nova.compute.manager [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] vm_util.copy_virtual_disk( [ 1698.396576] env[68443]: ERROR nova.compute.manager [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1698.396576] env[68443]: ERROR nova.compute.manager [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] session._wait_for_task(vmdk_copy_task) [ 1698.396576] env[68443]: ERROR nova.compute.manager [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 1698.396576] env[68443]: ERROR nova.compute.manager [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] return self.wait_for_task(task_ref) [ 1698.396576] env[68443]: ERROR nova.compute.manager [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1698.396576] env[68443]: ERROR nova.compute.manager [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] return evt.wait() [ 1698.396576] env[68443]: ERROR nova.compute.manager [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1698.396576] env[68443]: ERROR nova.compute.manager [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] result = hub.switch() [ 1698.396576] env[68443]: ERROR nova.compute.manager [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1698.396576] env[68443]: ERROR nova.compute.manager [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] return self.greenlet.switch() [ 1698.396576] env[68443]: ERROR nova.compute.manager [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1698.396959] env[68443]: ERROR nova.compute.manager [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] self.f(*self.args, **self.kw) [ 1698.396959] env[68443]: ERROR nova.compute.manager [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1698.396959] env[68443]: ERROR nova.compute.manager [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] raise exceptions.translate_fault(task_info.error) [ 1698.396959] env[68443]: ERROR nova.compute.manager [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1698.396959] env[68443]: ERROR nova.compute.manager [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Faults: ['InvalidArgument'] [ 1698.396959] env[68443]: ERROR nova.compute.manager [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] [ 1698.396959] env[68443]: INFO nova.compute.manager [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Terminating instance [ 1698.398026] env[68443]: DEBUG oslo_concurrency.lockutils [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1698.398254] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1698.398865] env[68443]: DEBUG nova.compute.manager [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Start 
destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1698.399065] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1698.399314] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7abe71f4-c739-43ac-90a7-3705624c5ada {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.401480] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-983169af-d381-4670-bdfa-de7e0222d8ec {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.408383] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Unregistering the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1698.408590] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-44021956-db4d-498f-a87d-b1948d6836ba {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.410595] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1698.410764] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68443) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1698.411733] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f2f77bb4-8d17-46a7-988d-a9256ef313e5 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.416280] env[68443]: DEBUG oslo_vmware.api [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Waiting for the task: (returnval){ [ 1698.416280] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]520869fe-d5a8-5436-be52-ee7328e43780" [ 1698.416280] env[68443]: _type = "Task" [ 1698.416280] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1698.423284] env[68443]: DEBUG oslo_vmware.api [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]520869fe-d5a8-5436-be52-ee7328e43780, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1698.478355] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Unregistered the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1698.478561] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Deleting contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1698.478740] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Deleting the datastore file [datastore1] 91fd9c10-db96-4366-9548-13b36f94db6b {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1698.478996] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c748c002-1c52-4d3b-9f10-aeb6c0981e71 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.484776] env[68443]: DEBUG oslo_vmware.api [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Waiting for the task: (returnval){ [ 1698.484776] env[68443]: value = "task-3374033" [ 1698.484776] env[68443]: _type = "Task" [ 1698.484776] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1698.492021] env[68443]: DEBUG oslo_vmware.api [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Task: {'id': task-3374033, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1698.926934] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Preparing fetch location {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1698.927311] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Creating directory with path [datastore1] vmware_temp/53d2ac3d-15b6-48ea-8a73-ef4b7ccbd976/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1698.927425] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b8e00c48-bd36-4ee4-ac7a-e4188593ca71 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.938519] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Created directory with path [datastore1] vmware_temp/53d2ac3d-15b6-48ea-8a73-ef4b7ccbd976/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1698.938688] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Fetch image to [datastore1] vmware_temp/53d2ac3d-15b6-48ea-8a73-ef4b7ccbd976/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1698.938858] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to [datastore1] vmware_temp/53d2ac3d-15b6-48ea-8a73-ef4b7ccbd976/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1698.939588] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8c270cb-41b4-488a-a193-bd27a5d4f385 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.945645] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88f1c6a9-6395-4fb4-b58f-79a0c211d197 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.954405] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc959883-2628-41a2-8925-62269997086c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.985405] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3ba7baa-dff1-429b-ae50-47991cedea00 {{(pid=68443) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.997242] env[68443]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ad72f34b-9638-421d-82f9-b602a52b0a51 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.999086] env[68443]: DEBUG oslo_vmware.api [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Task: {'id': task-3374033, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.078694} completed successfully. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1698.999373] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Deleted the datastore file {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1698.999563] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Deleted contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1698.999732] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1698.999902] env[68443]: INFO nova.compute.manager [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1699.002391] env[68443]: DEBUG nova.compute.claims [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Aborting claim: {{(pid=68443) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1699.002559] env[68443]: DEBUG oslo_concurrency.lockutils [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1699.002771] env[68443]: DEBUG oslo_concurrency.lockutils [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1699.019499] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1699.181148] env[68443]: DEBUG oslo_concurrency.lockutils [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1699.181939] env[68443]: ERROR nova.compute.manager [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Instance failed to spawn: nova.exception.ImageNotAuthorized: Not authorized for image a80a8b97-4d56-4702-9b02-9d115fcd6710. 
[ 1699.181939] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Traceback (most recent call last): [ 1699.181939] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1699.181939] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1699.181939] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1699.181939] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] result = getattr(controller, method)(*args, **kwargs) [ 1699.181939] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1699.181939] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] return self._get(image_id) [ 1699.181939] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1699.181939] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1699.181939] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1699.182464] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] resp, body = self.http_client.get(url, headers=header) [ 1699.182464] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1699.182464] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] return self.request(url, 'GET', **kwargs) [ 1699.182464] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1699.182464] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] return self._handle_response(resp) [ 1699.182464] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1699.182464] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] raise exc.from_response(resp, resp.content) [ 1699.182464] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1699.182464] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] [ 1699.182464] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] During handling of the above exception, another exception occurred: [ 1699.182464] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] [ 1699.182464] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Traceback (most recent call last): [ 1699.182964] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1699.182964] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] yield resources [ 1699.182964] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1699.182964] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] self.driver.spawn(context, instance, image_meta, [ 1699.182964] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1699.182964] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1699.182964] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1699.182964] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] self._fetch_image_if_missing(context, vi) [ 1699.182964] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1699.182964] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] image_fetch(context, vi, tmp_image_ds_loc) [ 1699.182964] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1699.182964] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] images.fetch_image( [ 1699.182964] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1699.183560] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] metadata = IMAGE_API.get(context, image_ref) [ 1699.183560] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1699.183560] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] return session.show(context, image_id, [ 1699.183560] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1699.183560] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] _reraise_translated_image_exception(image_id) [ 1699.183560] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File 
"/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1699.183560] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] raise new_exc.with_traceback(exc_trace) [ 1699.183560] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1699.183560] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1699.183560] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1699.183560] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] result = getattr(controller, method)(*args, **kwargs) [ 1699.183560] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1699.183560] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] return self._get(image_id) [ 1699.184084] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1699.184084] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1699.184084] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1699.184084] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] resp, body = self.http_client.get(url, headers=header) [ 1699.184084] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1699.184084] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] return self.request(url, 'GET', **kwargs) [ 1699.184084] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1699.184084] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] return self._handle_response(resp) [ 1699.184084] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1699.184084] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] raise exc.from_response(resp, resp.content) [ 1699.184084] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] nova.exception.ImageNotAuthorized: Not authorized for image a80a8b97-4d56-4702-9b02-9d115fcd6710. 
[ 1699.184084] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] [ 1699.184382] env[68443]: INFO nova.compute.manager [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Terminating instance [ 1699.184382] env[68443]: DEBUG oslo_concurrency.lockutils [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1699.184382] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1699.188205] env[68443]: DEBUG nova.compute.manager [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1699.188404] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1699.188658] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6df954cf-3750-4592-a062-682d6ea9b51b {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.194198] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eefc11f4-84dd-40e7-8760-115dddb11823 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.203935] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Unregistering the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1699.205288] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0f533306-ae3d-4898-a731-c171864de626 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.206560] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1699.206763] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 
tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68443) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1699.207598] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3d7f880a-bf15-4a42-9292-cc2e17ea6db1 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.213676] env[68443]: DEBUG oslo_vmware.api [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Waiting for the task: (returnval){ [ 1699.213676] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]52f410e1-fe51-7df8-9e18-98c2509d9e94" [ 1699.213676] env[68443]: _type = "Task" [ 1699.213676] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1699.223845] env[68443]: DEBUG oslo_vmware.api [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]52f410e1-fe51-7df8-9e18-98c2509d9e94, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1699.248024] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fbd7567-6e52-48f8-95d4-02008b50e088 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.255198] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc3e406b-d444-4720-b823-bdb8ad37796d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.285683] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-028ee4f6-70ac-4fcf-bdb6-af46241d2891 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.288333] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Unregistered the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1699.288531] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Deleting contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1699.288707] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Deleting the datastore file [datastore1] 75e3d9b0-4317-4e6e-9f2b-d32134f7223f {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1699.288944] env[68443]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-42208062-910c-4236-bf2b-112003b6a6aa {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.296975] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cfa52f8-2920-4ffb-87f9-93ae4d3c64a9 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.300740] env[68443]: DEBUG oslo_vmware.api [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Waiting for the task: (returnval){ [ 1699.300740] env[68443]: value = "task-3374035" [ 1699.300740] env[68443]: _type = "Task" [ 1699.300740] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1699.312664] env[68443]: DEBUG nova.compute.provider_tree [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1699.320434] env[68443]: DEBUG oslo_vmware.api [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Task: {'id': task-3374035, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1699.323489] env[68443]: DEBUG nova.scheduler.client.report [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1699.337582] env[68443]: DEBUG oslo_concurrency.lockutils [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.335s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1699.338103] env[68443]: ERROR nova.compute.manager [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1699.338103] env[68443]: Faults: ['InvalidArgument'] [ 1699.338103] env[68443]: ERROR nova.compute.manager [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Traceback (most recent call last): [ 1699.338103] env[68443]: ERROR nova.compute.manager [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in 
_build_and_run_instance [ 1699.338103] env[68443]: ERROR nova.compute.manager [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] self.driver.spawn(context, instance, image_meta, [ 1699.338103] env[68443]: ERROR nova.compute.manager [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1699.338103] env[68443]: ERROR nova.compute.manager [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1699.338103] env[68443]: ERROR nova.compute.manager [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1699.338103] env[68443]: ERROR nova.compute.manager [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] self._fetch_image_if_missing(context, vi) [ 1699.338103] env[68443]: ERROR nova.compute.manager [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1699.338103] env[68443]: ERROR nova.compute.manager [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] image_cache(vi, tmp_image_ds_loc) [ 1699.338103] env[68443]: ERROR nova.compute.manager [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1699.338536] env[68443]: ERROR nova.compute.manager [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] vm_util.copy_virtual_disk( [ 1699.338536] env[68443]: ERROR nova.compute.manager [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1699.338536] env[68443]: ERROR nova.compute.manager [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] session._wait_for_task(vmdk_copy_task) [ 1699.338536] env[68443]: ERROR nova.compute.manager [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1699.338536] env[68443]: ERROR nova.compute.manager [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] return self.wait_for_task(task_ref) [ 1699.338536] env[68443]: ERROR nova.compute.manager [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1699.338536] env[68443]: ERROR nova.compute.manager [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] return evt.wait() [ 1699.338536] env[68443]: ERROR nova.compute.manager [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1699.338536] env[68443]: ERROR nova.compute.manager [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] result = hub.switch() [ 1699.338536] env[68443]: ERROR nova.compute.manager [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1699.338536] env[68443]: ERROR nova.compute.manager [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] return self.greenlet.switch() [ 1699.338536] env[68443]: ERROR nova.compute.manager [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1699.338536] env[68443]: ERROR nova.compute.manager [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] self.f(*self.args, **self.kw) [ 1699.338884] env[68443]: ERROR nova.compute.manager [instance: 
91fd9c10-db96-4366-9548-13b36f94db6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1699.338884] env[68443]: ERROR nova.compute.manager [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] raise exceptions.translate_fault(task_info.error) [ 1699.338884] env[68443]: ERROR nova.compute.manager [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1699.338884] env[68443]: ERROR nova.compute.manager [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Faults: ['InvalidArgument'] [ 1699.338884] env[68443]: ERROR nova.compute.manager [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] [ 1699.338884] env[68443]: DEBUG nova.compute.utils [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] VimFaultException {{(pid=68443) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1699.340581] env[68443]: DEBUG nova.compute.manager [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Build of instance 91fd9c10-db96-4366-9548-13b36f94db6b was re-scheduled: A specified parameter was not correct: fileType [ 1699.340581] env[68443]: Faults: ['InvalidArgument'] {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1699.340958] env[68443]: DEBUG nova.compute.manager [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Unplugging VIFs for instance {{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1699.341144] env[68443]: DEBUG nova.compute.manager [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1699.341328] env[68443]: DEBUG nova.compute.manager [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1699.341494] env[68443]: DEBUG nova.network.neutron [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1699.659309] env[68443]: DEBUG nova.network.neutron [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1699.670859] env[68443]: INFO nova.compute.manager [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Took 0.33 seconds to deallocate network for instance. [ 1699.730209] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Preparing fetch location {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1699.730209] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Creating directory with path [datastore1] vmware_temp/c81c48d2-25be-460b-a46b-d661e07b7e83/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1699.730209] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d4c21469-cda1-49c7-9c64-0d865f439f18 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.741711] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Created directory with path [datastore1] vmware_temp/c81c48d2-25be-460b-a46b-d661e07b7e83/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1699.741935] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Fetch image to [datastore1] vmware_temp/c81c48d2-25be-460b-a46b-d661e07b7e83/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1699.742112] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 
tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to [datastore1] vmware_temp/c81c48d2-25be-460b-a46b-d661e07b7e83/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1699.742873] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8df13614-661c-437a-87e0-e4857bf144e7 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.750032] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e0984b5-61b5-4204-a281-6ebee20dc67c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.763683] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8911ab8a-273a-4e20-ba39-0462a75a104d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.797914] env[68443]: INFO nova.scheduler.client.report [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Deleted allocations for instance 91fd9c10-db96-4366-9548-13b36f94db6b [ 1699.804047] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b20598a4-fa53-4106-800d-347bd070eda7 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.820072] env[68443]: DEBUG oslo_vmware.api [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Task: {'id': task-3374035, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077491} completed successfully. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1699.820668] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Deleted the datastore file {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1699.820868] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Deleted contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1699.821056] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1699.821329] env[68443]: INFO nova.compute.manager [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Took 0.63 seconds to destroy the instance on the hypervisor. [ 1699.822970] env[68443]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-8b828406-12d7-4556-a725-f544a99d6da3 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.825277] env[68443]: DEBUG nova.compute.claims [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Aborting claim: {{(pid=68443) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1699.825457] env[68443]: DEBUG oslo_concurrency.lockutils [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1699.825673] env[68443]: DEBUG oslo_concurrency.lockutils [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1699.829500] env[68443]: DEBUG oslo_concurrency.lockutils [None req-68f0b799-3bb5-4138-9270-5d10c2e25383 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Lock "91fd9c10-db96-4366-9548-13b36f94db6b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 635.691s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1699.830562] env[68443]: DEBUG oslo_concurrency.lockutils [None req-1d76cf02-66ee-4c7a-b820-374cb1983ae5 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Lock 
"91fd9c10-db96-4366-9548-13b36f94db6b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 438.989s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1699.830805] env[68443]: DEBUG oslo_concurrency.lockutils [None req-1d76cf02-66ee-4c7a-b820-374cb1983ae5 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Acquiring lock "91fd9c10-db96-4366-9548-13b36f94db6b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1699.831037] env[68443]: DEBUG oslo_concurrency.lockutils [None req-1d76cf02-66ee-4c7a-b820-374cb1983ae5 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Lock "91fd9c10-db96-4366-9548-13b36f94db6b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1699.831215] env[68443]: DEBUG oslo_concurrency.lockutils [None req-1d76cf02-66ee-4c7a-b820-374cb1983ae5 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Lock "91fd9c10-db96-4366-9548-13b36f94db6b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1699.833201] env[68443]: INFO nova.compute.manager [None req-1d76cf02-66ee-4c7a-b820-374cb1983ae5 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Terminating instance [ 1699.837281] env[68443]: DEBUG nova.compute.manager [None req-1d76cf02-66ee-4c7a-b820-374cb1983ae5 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1699.837586] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-1d76cf02-66ee-4c7a-b820-374cb1983ae5 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1699.837745] env[68443]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6711111a-7725-457e-b172-1e6150882c38 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.846385] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b331846-fe16-49f5-a5f3-e29fd50dc622 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.857460] env[68443]: DEBUG nova.compute.manager [None req-8dd826de-cfd2-4e21-95ad-cfd3609251aa tempest-ListImageFiltersTestJSON-1717746758 tempest-ListImageFiltersTestJSON-1717746758-project-member] [instance: 91194294-6f8a-4067-a7ed-610c9da3aec8] Starting instance... 
{{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1699.861788] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1699.885134] env[68443]: WARNING nova.virt.vmwareapi.vmops [None req-1d76cf02-66ee-4c7a-b820-374cb1983ae5 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 91fd9c10-db96-4366-9548-13b36f94db6b could not be found. [ 1699.885134] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-1d76cf02-66ee-4c7a-b820-374cb1983ae5 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1699.885134] env[68443]: INFO nova.compute.manager [None req-1d76cf02-66ee-4c7a-b820-374cb1983ae5 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1699.885134] env[68443]: DEBUG oslo.service.loopingcall [None req-1d76cf02-66ee-4c7a-b820-374cb1983ae5 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1699.885134] env[68443]: DEBUG nova.compute.manager [-] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1699.885383] env[68443]: DEBUG nova.network.neutron [-] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1699.911327] env[68443]: DEBUG nova.compute.manager [None req-8dd826de-cfd2-4e21-95ad-cfd3609251aa tempest-ListImageFiltersTestJSON-1717746758 tempest-ListImageFiltersTestJSON-1717746758-project-member] [instance: 91194294-6f8a-4067-a7ed-610c9da3aec8] Instance disappeared before build. {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1699.924892] env[68443]: DEBUG nova.network.neutron [-] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1699.934570] env[68443]: INFO nova.compute.manager [-] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] Took 0.05 seconds to deallocate network for instance. 
[ 1699.941520] env[68443]: DEBUG oslo_concurrency.lockutils [None req-8dd826de-cfd2-4e21-95ad-cfd3609251aa tempest-ListImageFiltersTestJSON-1717746758 tempest-ListImageFiltersTestJSON-1717746758-project-member] Lock "91194294-6f8a-4067-a7ed-610c9da3aec8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 239.833s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1699.946755] env[68443]: DEBUG oslo_vmware.rw_handles [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c81c48d2-25be-460b-a46b-d661e07b7e83/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68443) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1700.014019] env[68443]: DEBUG nova.compute.manager [None req-17f756af-e0ad-4f6d-902b-98b8dd62dd25 tempest-ListImageFiltersTestJSON-1717746758 tempest-ListImageFiltersTestJSON-1717746758-project-member] [instance: 409e47f5-dea2-43a4-9ab6-475dc09fafb4] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1700.020269] env[68443]: DEBUG oslo_vmware.rw_handles [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Completed reading data from the image iterator. {{(pid=68443) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1700.020469] env[68443]: DEBUG oslo_vmware.rw_handles [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c81c48d2-25be-460b-a46b-d661e07b7e83/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68443) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1700.036889] env[68443]: DEBUG nova.compute.manager [None req-17f756af-e0ad-4f6d-902b-98b8dd62dd25 tempest-ListImageFiltersTestJSON-1717746758 tempest-ListImageFiltersTestJSON-1717746758-project-member] [instance: 409e47f5-dea2-43a4-9ab6-475dc09fafb4] Instance disappeared before build. {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1700.059855] env[68443]: DEBUG oslo_concurrency.lockutils [None req-17f756af-e0ad-4f6d-902b-98b8dd62dd25 tempest-ListImageFiltersTestJSON-1717746758 tempest-ListImageFiltersTestJSON-1717746758-project-member] Lock "409e47f5-dea2-43a4-9ab6-475dc09fafb4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 239.705s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1700.075309] env[68443]: DEBUG nova.compute.manager [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Starting instance... 
{{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1700.082440] env[68443]: DEBUG oslo_concurrency.lockutils [None req-1d76cf02-66ee-4c7a-b820-374cb1983ae5 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Lock "91fd9c10-db96-4366-9548-13b36f94db6b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.252s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1700.083783] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "91fd9c10-db96-4366-9548-13b36f94db6b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 423.990s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1700.084641] env[68443]: INFO nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 91fd9c10-db96-4366-9548-13b36f94db6b] During sync_power_state the instance has a pending task (deleting). Skip. [ 1700.084901] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "91fd9c10-db96-4366-9548-13b36f94db6b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1700.125710] env[68443]: DEBUG oslo_concurrency.lockutils [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1700.140746] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82277460-e497-4a81-8499-28541f3650d3 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.148531] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58399626-1a4b-44b6-a651-b568f2a80168 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.179218] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7b8b179-427d-44cc-b576-0e03ee88154f {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.187237] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6d44eac-df25-4766-98ff-65b1d5b6726d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.200125] env[68443]: DEBUG nova.compute.provider_tree [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1700.210332] env[68443]: DEBUG nova.scheduler.client.report [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b 
tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1700.223846] env[68443]: DEBUG oslo_concurrency.lockutils [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.398s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1700.224556] env[68443]: ERROR nova.compute.manager [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Failed to build and run instance: nova.exception.ImageNotAuthorized: Not authorized for image a80a8b97-4d56-4702-9b02-9d115fcd6710. [ 1700.224556] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Traceback (most recent call last): [ 1700.224556] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1700.224556] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1700.224556] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1700.224556] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] result = getattr(controller, method)(*args, **kwargs) [ 1700.224556] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1700.224556] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] return self._get(image_id) [ 1700.224556] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1700.224556] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1700.224556] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1700.224865] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] resp, body = self.http_client.get(url, headers=header) [ 1700.224865] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1700.224865] env[68443]: ERROR nova.compute.manager 
[instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] return self.request(url, 'GET', **kwargs) [ 1700.224865] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1700.224865] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] return self._handle_response(resp) [ 1700.224865] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1700.224865] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] raise exc.from_response(resp, resp.content) [ 1700.224865] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. [ 1700.224865] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] [ 1700.224865] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] During handling of the above exception, another exception occurred: [ 1700.224865] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] [ 1700.224865] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Traceback (most recent call last): [ 1700.225162] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1700.225162] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] self.driver.spawn(context, instance, image_meta, [ 1700.225162] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1700.225162] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1700.225162] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1700.225162] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] self._fetch_image_if_missing(context, vi) [ 1700.225162] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1700.225162] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] image_fetch(context, vi, tmp_image_ds_loc) [ 1700.225162] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1700.225162] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] images.fetch_image( [ 1700.225162] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 
1700.225162] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] metadata = IMAGE_API.get(context, image_ref) [ 1700.225162] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1700.226077] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] return session.show(context, image_id, [ 1700.226077] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1700.226077] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] _reraise_translated_image_exception(image_id) [ 1700.226077] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1700.226077] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] raise new_exc.with_traceback(exc_trace) [ 1700.226077] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1700.226077] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1700.226077] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1700.226077] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] result = getattr(controller, method)(*args, **kwargs) [ 1700.226077] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1700.226077] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] return self._get(image_id) [ 1700.226077] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1700.226077] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1700.226583] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1700.226583] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] resp, body = self.http_client.get(url, headers=header) [ 1700.226583] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1700.226583] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] return self.request(url, 'GET', **kwargs) [ 1700.226583] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1700.226583] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] return self._handle_response(resp) [ 1700.226583] env[68443]: ERROR 
nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1700.226583] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] raise exc.from_response(resp, resp.content) [ 1700.226583] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] nova.exception.ImageNotAuthorized: Not authorized for image a80a8b97-4d56-4702-9b02-9d115fcd6710. [ 1700.226583] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] [ 1700.226583] env[68443]: DEBUG nova.compute.utils [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Not authorized for image a80a8b97-4d56-4702-9b02-9d115fcd6710. {{(pid=68443) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1700.227052] env[68443]: DEBUG oslo_concurrency.lockutils [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.101s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1700.227669] env[68443]: INFO nova.compute.claims [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1700.230252] env[68443]: DEBUG nova.compute.manager [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Build of instance 75e3d9b0-4317-4e6e-9f2b-d32134f7223f was re-scheduled: Not authorized for image a80a8b97-4d56-4702-9b02-9d115fcd6710. {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1700.230786] env[68443]: DEBUG nova.compute.manager [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Unplugging VIFs for instance {{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1700.230961] env[68443]: DEBUG nova.compute.manager [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1700.231132] env[68443]: DEBUG nova.compute.manager [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1700.231298] env[68443]: DEBUG nova.network.neutron [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1700.338046] env[68443]: DEBUG neutronclient.v2_0.client [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=68443) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1700.338847] env[68443]: ERROR nova.compute.manager [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Failed to deallocate networks: nova.exception.Unauthorized: Not authorized. [ 1700.338847] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Traceback (most recent call last): [ 1700.338847] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1700.338847] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1700.338847] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1700.338847] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] result = getattr(controller, method)(*args, **kwargs) [ 1700.338847] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1700.338847] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] return self._get(image_id) [ 1700.338847] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1700.338847] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1700.338847] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1700.339185] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] resp, body = self.http_client.get(url, headers=header) [ 1700.339185] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1700.339185] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] return self.request(url, 'GET', **kwargs) [ 1700.339185] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1700.339185] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] return self._handle_response(resp) [ 1700.339185] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1700.339185] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] raise exc.from_response(resp, resp.content) [ 1700.339185] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. [ 1700.339185] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] [ 1700.339185] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] During handling of the above exception, another exception occurred: [ 1700.339185] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] [ 1700.339185] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Traceback (most recent call last): [ 1700.339685] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1700.339685] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] self.driver.spawn(context, instance, image_meta, [ 1700.339685] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1700.339685] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1700.339685] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1700.339685] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] self._fetch_image_if_missing(context, vi) [ 1700.339685] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1700.339685] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] image_fetch(context, vi, tmp_image_ds_loc) [ 1700.339685] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1700.339685] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] images.fetch_image( [ 1700.339685] env[68443]: ERROR 
nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1700.339685] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] metadata = IMAGE_API.get(context, image_ref) [ 1700.339685] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1700.340059] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] return session.show(context, image_id, [ 1700.340059] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1700.340059] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] _reraise_translated_image_exception(image_id) [ 1700.340059] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1700.340059] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] raise new_exc.with_traceback(exc_trace) [ 1700.340059] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1700.340059] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1700.340059] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1700.340059] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] result = getattr(controller, method)(*args, **kwargs) [ 1700.340059] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1700.340059] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] return self._get(image_id) [ 1700.340059] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1700.340059] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1700.340434] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1700.340434] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] resp, body = self.http_client.get(url, headers=header) [ 1700.340434] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1700.340434] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] return self.request(url, 'GET', **kwargs) [ 1700.340434] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1700.340434] 
env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] return self._handle_response(resp) [ 1700.340434] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1700.340434] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] raise exc.from_response(resp, resp.content) [ 1700.340434] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] nova.exception.ImageNotAuthorized: Not authorized for image a80a8b97-4d56-4702-9b02-9d115fcd6710. [ 1700.340434] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] [ 1700.340434] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] During handling of the above exception, another exception occurred: [ 1700.340434] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] [ 1700.340434] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Traceback (most recent call last): [ 1700.340784] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/compute/manager.py", line 2447, in _do_build_and_run_instance [ 1700.340784] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] self._build_and_run_instance(context, instance, image, [ 1700.340784] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/compute/manager.py", line 2739, in _build_and_run_instance [ 1700.340784] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] raise exception.RescheduledException( [ 1700.340784] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] nova.exception.RescheduledException: Build of instance 75e3d9b0-4317-4e6e-9f2b-d32134f7223f was re-scheduled: Not authorized for image a80a8b97-4d56-4702-9b02-9d115fcd6710. 
[ 1700.340784] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] [ 1700.340784] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] During handling of the above exception, another exception occurred: [ 1700.340784] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] [ 1700.340784] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Traceback (most recent call last): [ 1700.340784] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1700.340784] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] ret = obj(*args, **kwargs) [ 1700.340784] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1700.340784] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] exception_handler_v20(status_code, error_body) [ 1700.341168] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1700.341168] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] raise client_exc(message=error_message, [ 1700.341168] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1700.341168] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Neutron server returns request_ids: ['req-9e2825da-d8c7-40bc-af5c-7be3aa497773'] [ 1700.341168] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] [ 1700.341168] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] During handling of the above exception, another exception occurred: [ 1700.341168] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] [ 1700.341168] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Traceback (most recent call last): [ 1700.341168] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/compute/manager.py", line 3036, in _cleanup_allocated_networks [ 1700.341168] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] self._deallocate_network(context, instance, requested_networks) [ 1700.341168] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1700.341168] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] self.network_api.deallocate_for_instance( [ 1700.341168] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1700.341602] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] data = neutron.list_ports(**search_opts) [ 
1700.341602] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1700.341602] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] ret = obj(*args, **kwargs) [ 1700.341602] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1700.341602] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] return self.list('ports', self.ports_path, retrieve_all, [ 1700.341602] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1700.341602] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] ret = obj(*args, **kwargs) [ 1700.341602] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1700.341602] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] for r in self._pagination(collection, path, **params): [ 1700.341602] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1700.341602] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] res = self.get(path, params=params) [ 1700.341602] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1700.341602] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] ret = obj(*args, **kwargs) [ 1700.341967] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1700.341967] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] return self.retry_request("GET", action, body=body, [ 1700.341967] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1700.341967] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] ret = obj(*args, **kwargs) [ 1700.341967] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1700.341967] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] return self.do_request(method, action, body=body, [ 1700.341967] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1700.341967] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] ret = obj(*args, **kwargs) [ 1700.341967] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 
1700.341967] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] self._handle_fault_response(status_code, replybody, resp) [ 1700.341967] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/network/neutron.py", line 204, in wrapper [ 1700.341967] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] raise exception.Unauthorized() [ 1700.341967] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] nova.exception.Unauthorized: Not authorized. [ 1700.342334] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] [ 1700.394035] env[68443]: INFO nova.scheduler.client.report [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Deleted allocations for instance 75e3d9b0-4317-4e6e-9f2b-d32134f7223f [ 1700.419064] env[68443]: DEBUG oslo_concurrency.lockutils [None req-2b8609c0-49a5-4071-a516-fc51e64d0c1b tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Lock "75e3d9b0-4317-4e6e-9f2b-d32134f7223f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 569.186s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1700.419064] env[68443]: DEBUG oslo_concurrency.lockutils [None req-0ee75a4e-dd68-4812-8cad-94f8bae9d141 tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Lock "75e3d9b0-4317-4e6e-9f2b-d32134f7223f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 372.320s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1700.419064] env[68443]: DEBUG oslo_concurrency.lockutils [None req-0ee75a4e-dd68-4812-8cad-94f8bae9d141 tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Acquiring lock "75e3d9b0-4317-4e6e-9f2b-d32134f7223f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1700.419287] env[68443]: DEBUG oslo_concurrency.lockutils [None req-0ee75a4e-dd68-4812-8cad-94f8bae9d141 tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Lock "75e3d9b0-4317-4e6e-9f2b-d32134f7223f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1700.419287] env[68443]: DEBUG oslo_concurrency.lockutils [None req-0ee75a4e-dd68-4812-8cad-94f8bae9d141 tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Lock "75e3d9b0-4317-4e6e-9f2b-d32134f7223f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1700.420664] env[68443]: INFO nova.compute.manager [None req-0ee75a4e-dd68-4812-8cad-94f8bae9d141 tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] 
Terminating instance [ 1700.422775] env[68443]: DEBUG nova.compute.manager [None req-0ee75a4e-dd68-4812-8cad-94f8bae9d141 tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1700.423089] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-0ee75a4e-dd68-4812-8cad-94f8bae9d141 tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1700.426050] env[68443]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6a8e5ba4-3dcc-4dd1-aea1-271edf3499ef {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.429377] env[68443]: DEBUG nova.compute.manager [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1700.440389] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ea16b52-0abd-4a90-848f-a5dfd8790b60 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.476022] env[68443]: WARNING nova.virt.vmwareapi.vmops [None req-0ee75a4e-dd68-4812-8cad-94f8bae9d141 tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 75e3d9b0-4317-4e6e-9f2b-d32134f7223f could not be found. [ 1700.476253] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-0ee75a4e-dd68-4812-8cad-94f8bae9d141 tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1700.476435] env[68443]: INFO nova.compute.manager [None req-0ee75a4e-dd68-4812-8cad-94f8bae9d141 tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1700.476685] env[68443]: DEBUG oslo.service.loopingcall [None req-0ee75a4e-dd68-4812-8cad-94f8bae9d141 tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1700.481587] env[68443]: DEBUG nova.compute.manager [-] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1700.481681] env[68443]: DEBUG nova.network.neutron [-] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1700.498823] env[68443]: DEBUG oslo_concurrency.lockutils [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1700.503030] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0c6c4b7-3e62-47a7-9f38-71b2c3992619 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.510394] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06005aa0-63d3-4530-a8e5-de453d5c3340 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.545526] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32f0dbcd-79cf-4351-8d68-bf37715a6e94 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.555465] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1ea8d30-e1ad-466d-b978-b6332e71e8b0 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.569961] env[68443]: DEBUG nova.compute.provider_tree [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1700.587021] env[68443]: DEBUG oslo_concurrency.lockutils [None req-b7fec8f2-b8fa-4881-9de9-51d40c3ac3ad tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Acquiring lock "cf4f7ce7-55d5-4f5a-b19d-7674ee1969b5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1700.587288] env[68443]: DEBUG oslo_concurrency.lockutils [None req-b7fec8f2-b8fa-4881-9de9-51d40c3ac3ad tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Lock "cf4f7ce7-55d5-4f5a-b19d-7674ee1969b5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1700.588066] env[68443]: DEBUG nova.scheduler.client.report [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 
tempest-ServerShowV254Test-318671727-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1700.592707] env[68443]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=68443) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1700.592961] env[68443]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1700.593519] env[68443]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1700.593519] env[68443]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1700.593519] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1700.593519] env[68443]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1700.593519] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1700.593519] env[68443]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1700.593519] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1700.593519] env[68443]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1700.593519] env[68443]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1700.593519] env[68443]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-f6f4eb0c-d6b5-4c5b-978a-3b02c53e5f00'] [ 1700.593519] env[68443]: ERROR oslo.service.loopingcall [ 1700.593519] env[68443]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1700.593519] env[68443]: ERROR oslo.service.loopingcall [ 1700.593519] env[68443]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1700.593519] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1700.593519] env[68443]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1700.593970] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1700.593970] env[68443]: 
ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1700.593970] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1700.593970] env[68443]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1700.593970] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1700.593970] env[68443]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1700.593970] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1700.593970] env[68443]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1700.593970] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1700.593970] env[68443]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1700.593970] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1700.593970] env[68443]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1700.593970] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1700.593970] env[68443]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1700.593970] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1700.593970] env[68443]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1700.593970] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1700.593970] env[68443]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1700.594486] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1700.594486] env[68443]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1700.594486] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1700.594486] env[68443]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1700.594486] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1700.594486] env[68443]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1700.594486] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1700.594486] env[68443]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1700.594486] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1700.594486] env[68443]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1700.594486] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1700.594486] env[68443]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1700.594486] env[68443]: ERROR 
oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1700.594486] env[68443]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1700.594486] env[68443]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1700.594486] env[68443]: ERROR oslo.service.loopingcall [ 1700.594917] env[68443]: ERROR nova.compute.manager [None req-0ee75a4e-dd68-4812-8cad-94f8bae9d141 tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1700.604486] env[68443]: DEBUG oslo_concurrency.lockutils [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.378s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1700.604989] env[68443]: DEBUG nova.compute.manager [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Start building networks asynchronously for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1700.607315] env[68443]: DEBUG oslo_concurrency.lockutils [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.109s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1700.608679] env[68443]: INFO nova.compute.claims [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1700.636676] env[68443]: ERROR nova.compute.manager [None req-0ee75a4e-dd68-4812-8cad-94f8bae9d141 tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1700.636676] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Traceback (most recent call last): [ 1700.636676] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1700.636676] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] ret = obj(*args, **kwargs) [ 1700.636676] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1700.636676] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] exception_handler_v20(status_code, error_body) [ 1700.636676] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1700.636676] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] raise client_exc(message=error_message, [ 1700.636676] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1700.636676] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Neutron server returns request_ids: ['req-f6f4eb0c-d6b5-4c5b-978a-3b02c53e5f00'] [ 1700.636676] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] [ 1700.637488] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] During handling of the above exception, another exception occurred: [ 1700.637488] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] [ 1700.637488] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Traceback (most recent call last): [ 1700.637488] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 1700.637488] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] self._delete_instance(context, instance, bdms) [ 1700.637488] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 1700.637488] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] self._shutdown_instance(context, instance, bdms) [ 1700.637488] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 1700.637488] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] self._try_deallocate_network(context, instance, requested_networks) [ 1700.637488] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 1700.637488] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] with excutils.save_and_reraise_exception(): [ 1700.637488] env[68443]: ERROR 
nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1700.637488] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] self.force_reraise() [ 1700.638092] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1700.638092] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] raise self.value [ 1700.638092] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 1700.638092] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] _deallocate_network_with_retries() [ 1700.638092] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1700.638092] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] return evt.wait() [ 1700.638092] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1700.638092] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] result = hub.switch() [ 1700.638092] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1700.638092] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] return self.greenlet.switch() [ 1700.638092] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1700.638092] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] result = func(*self.args, **self.kw) [ 1700.638673] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1700.638673] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] result = f(*args, **kwargs) [ 1700.638673] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1700.638673] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] self._deallocate_network( [ 1700.638673] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1700.638673] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] self.network_api.deallocate_for_instance( [ 1700.638673] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1700.638673] env[68443]: ERROR nova.compute.manager [instance: 
75e3d9b0-4317-4e6e-9f2b-d32134f7223f] data = neutron.list_ports(**search_opts) [ 1700.638673] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1700.638673] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] ret = obj(*args, **kwargs) [ 1700.638673] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1700.638673] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] return self.list('ports', self.ports_path, retrieve_all, [ 1700.638673] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1700.639313] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] ret = obj(*args, **kwargs) [ 1700.639313] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1700.639313] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] for r in self._pagination(collection, path, **params): [ 1700.639313] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1700.639313] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] res = self.get(path, params=params) [ 1700.639313] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1700.639313] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] ret = obj(*args, **kwargs) [ 1700.639313] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1700.639313] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] return self.retry_request("GET", action, body=body, [ 1700.639313] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1700.639313] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] ret = obj(*args, **kwargs) [ 1700.639313] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1700.639313] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] return self.do_request(method, action, body=body, [ 1700.639932] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1700.639932] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] ret = obj(*args, **kwargs) [ 1700.639932] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1700.639932] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] self._handle_fault_response(status_code, replybody, resp) [ 1700.639932] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1700.639932] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1700.639932] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1700.639932] env[68443]: ERROR nova.compute.manager [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] [ 1700.639932] env[68443]: DEBUG nova.compute.utils [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Using /dev/sd instead of None {{(pid=68443) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1700.641282] env[68443]: DEBUG nova.compute.manager [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Not allocating networking since 'none' was specified. {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1968}} [ 1700.664566] env[68443]: DEBUG oslo_concurrency.lockutils [None req-0ee75a4e-dd68-4812-8cad-94f8bae9d141 tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Lock "75e3d9b0-4317-4e6e-9f2b-d32134f7223f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.247s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1700.677207] env[68443]: DEBUG nova.compute.manager [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Start building block device mappings for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1700.727584] env[68443]: INFO nova.compute.manager [None req-0ee75a4e-dd68-4812-8cad-94f8bae9d141 tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] [instance: 75e3d9b0-4317-4e6e-9f2b-d32134f7223f] Successfully reverted task state from None on failure for instance. [ 1700.733604] env[68443]: ERROR oslo_messaging.rpc.server [None req-0ee75a4e-dd68-4812-8cad-94f8bae9d141 tempest-MigrationsAdminTest-1458896987 tempest-MigrationsAdminTest-1458896987-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1700.733604] env[68443]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1700.733604] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1700.733604] env[68443]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1700.733604] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1700.733604] env[68443]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1700.733604] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1700.733604] env[68443]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1700.733604] env[68443]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1700.733604] env[68443]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-f6f4eb0c-d6b5-4c5b-978a-3b02c53e5f00'] [ 1700.733604] env[68443]: ERROR oslo_messaging.rpc.server [ 1700.733604] env[68443]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1700.733604] env[68443]: ERROR oslo_messaging.rpc.server [ 1700.733604] env[68443]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1700.733604] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1700.733604] env[68443]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1700.734257] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1700.734257] env[68443]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1700.734257] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1700.734257] env[68443]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1700.734257] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1700.734257] env[68443]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1700.734257] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1700.734257] env[68443]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1700.734257] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1700.734257] env[68443]: ERROR oslo_messaging.rpc.server raise self.value [ 1700.734257] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1700.734257] env[68443]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1700.734257] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1700.734257] env[68443]: ERROR oslo_messaging.rpc.server with 
excutils.save_and_reraise_exception(): [ 1700.734257] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1700.734257] env[68443]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1700.734257] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1700.734257] env[68443]: ERROR oslo_messaging.rpc.server raise self.value [ 1700.734764] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1700.734764] env[68443]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1700.734764] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1700.734764] env[68443]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1700.734764] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1700.734764] env[68443]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1700.734764] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1700.734764] env[68443]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1700.734764] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1700.734764] env[68443]: ERROR oslo_messaging.rpc.server raise self.value [ 1700.734764] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1700.734764] env[68443]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1700.734764] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3344, in terminate_instance [ 1700.734764] env[68443]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1700.734764] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 1700.734764] env[68443]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1700.734764] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3339, in do_terminate_instance [ 1700.734764] env[68443]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1700.735184] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1700.735184] env[68443]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1700.735184] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1700.735184] env[68443]: ERROR oslo_messaging.rpc.server raise self.value [ 1700.735184] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 1700.735184] env[68443]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1700.735184] env[68443]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 1700.735184] env[68443]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1700.735184] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 1700.735184] env[68443]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1700.735184] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 1700.735184] env[68443]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1700.735184] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1700.735184] env[68443]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1700.735184] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1700.735184] env[68443]: ERROR oslo_messaging.rpc.server raise self.value [ 1700.735184] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 1700.735184] env[68443]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1700.735620] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1700.735620] env[68443]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1700.735620] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1700.735620] env[68443]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1700.735620] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1700.735620] env[68443]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1700.735620] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1700.735620] env[68443]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1700.735620] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1700.735620] env[68443]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1700.735620] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1700.735620] env[68443]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1700.735620] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1700.735620] env[68443]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1700.735620] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1700.735620] env[68443]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1700.735620] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1700.735620] env[68443]: ERROR 
oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1700.736110] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1700.736110] env[68443]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1700.736110] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1700.736110] env[68443]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1700.736110] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1700.736110] env[68443]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1700.736110] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1700.736110] env[68443]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1700.736110] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1700.736110] env[68443]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1700.736110] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1700.736110] env[68443]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1700.736110] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1700.736110] env[68443]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1700.736110] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1700.736110] env[68443]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1700.736110] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1700.736110] env[68443]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1700.736597] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1700.736597] env[68443]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1700.736597] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1700.736597] env[68443]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1700.736597] env[68443]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1700.736597] env[68443]: ERROR oslo_messaging.rpc.server [ 1700.746186] env[68443]: DEBUG nova.compute.manager [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Start spawning the instance on the hypervisor. 
{{(pid=68443) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1700.772151] env[68443]: DEBUG nova.virt.hardware [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-08T18:48:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-08T18:48:12Z,direct_url=,disk_format='vmdk',id=a80a8b97-4d56-4702-9b02-9d115fcd6710,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f4f2a1e220914ec3b281775c224df247',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-08T18:48:13Z,virtual_size=,visibility=), allow threads: False {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1700.772396] env[68443]: DEBUG nova.virt.hardware [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Flavor limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1700.772554] env[68443]: DEBUG nova.virt.hardware [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Image limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1700.772750] env[68443]: DEBUG nova.virt.hardware [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Flavor pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1700.772903] env[68443]: DEBUG nova.virt.hardware [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Image pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1700.773062] env[68443]: DEBUG nova.virt.hardware [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1700.773269] env[68443]: DEBUG nova.virt.hardware [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1700.773715] env[68443]: DEBUG nova.virt.hardware [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1700.773930] env[68443]: DEBUG nova.virt.hardware [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 
tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Got 1 possible topologies {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1700.774119] env[68443]: DEBUG nova.virt.hardware [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1700.774305] env[68443]: DEBUG nova.virt.hardware [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1700.775150] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-955238ce-148a-40e4-8540-4e8914627f0b {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.784937] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf94bdbe-77f9-46a0-a728-339c53fe34d9 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.800652] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Instance VIF info [] {{(pid=68443) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1700.806213] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Creating folder: Project (fa58d0a06ee94a1bb5d22cdfb3a55bf9). Parent ref: group-v673136. {{(pid=68443) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1700.808636] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a6600e78-1266-4911-83bc-8e6e371d9d8f {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.817973] env[68443]: INFO nova.virt.vmwareapi.vm_util [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Created folder: Project (fa58d0a06ee94a1bb5d22cdfb3a55bf9) in parent group-v673136. [ 1700.818163] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Creating folder: Instances. Parent ref: group-v673221. {{(pid=68443) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1700.820529] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-90748fb0-4cc5-4a09-ae7b-9b692deb0dfd {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.828521] env[68443]: INFO nova.virt.vmwareapi.vm_util [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Created folder: Instances in parent group-v673221. 
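While instance 0a9bb99d-8f94-4f26-990e-a57aac09c328 is being spawned, the vmwareapi driver follows the usual oslo.vmware pattern visible in these entries: it invokes a vSphere method through the session (Folder.CreateFolder returns its result directly, while Folder.CreateVM_Task in the entries that follow returns a Task managed object) and then blocks in wait_for_task, which polls the task until it completes (the "progress is 0%" and "duration_secs" lines). A minimal sketch of that invoke-then-wait flow, assuming an established VMwareAPISession; the helper name and the managed-object references are hypothetical placeholders, not the driver's actual code:

from oslo_vmware import api as vmware_api


def build_vm(session, parent_folder_ref, vm_config_spec, resource_pool_ref):
    vim = session.vim

    # Synchronous call: CreateFolder returns the new folder's moref directly.
    instances_folder = session.invoke_api(
        vim, 'CreateFolder', parent_folder_ref, name='Instances')

    # Asynchronous call: CreateVM_Task returns a Task moref immediately;
    # wait_for_task then polls it (the "progress is N%" log lines) and
    # returns the task info, whose result is the new VM's moref.
    task_ref = session.invoke_api(
        vim, 'CreateVM_Task', instances_folder,
        config=vm_config_spec, pool=resource_pool_ref)
    task_info = session.wait_for_task(task_ref)
    return task_info.result


# Hypothetical usage against an endpoint like the one logged at start-up:
#   session = vmware_api.VMwareAPISession('vc1.example.test', 'user', 'secret',
#                                         api_retry_count=10,
#                                         task_poll_interval=0.5)
#   vm_ref = build_vm(session, project_folder_ref, spec, pool_ref)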
[ 1700.828761] env[68443]: DEBUG oslo.service.loopingcall [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1700.828945] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Creating VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1700.829150] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d24f0cf7-3d1b-4192-a8ce-6931d08c7fbd {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.846965] env[68443]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1700.846965] env[68443]: value = "task-3374038" [ 1700.846965] env[68443]: _type = "Task" [ 1700.846965] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1700.854836] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3374038, 'name': CreateVM_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1700.890445] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ea02be0-e4a2-4c75-a6c8-bc56b300939b {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.898594] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beaff0fa-6d61-493d-875e-0d0563aa09d7 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.930355] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-214a261f-789b-4c2d-888c-1a1ed9afc504 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.937530] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22f18f2a-322b-4339-862b-2bfa32a94434 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.950796] env[68443]: DEBUG nova.compute.provider_tree [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1700.961026] env[68443]: DEBUG nova.scheduler.client.report [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1700.973547] env[68443]: DEBUG oslo_concurrency.lockutils [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.366s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1700.974252] env[68443]: DEBUG nova.compute.manager [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Start building networks asynchronously for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1701.009458] env[68443]: DEBUG nova.compute.utils [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Using /dev/sd instead of None {{(pid=68443) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1701.010839] env[68443]: DEBUG nova.compute.manager [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Allocating IP information in the background. {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1701.011009] env[68443]: DEBUG nova.network.neutron [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] allocate_for_instance() {{(pid=68443) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1701.023355] env[68443]: DEBUG nova.compute.manager [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Start building block device mappings for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1701.087427] env[68443]: DEBUG nova.policy [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '78756bf02577427ead18a9c766a8219b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bf69652ce2414eab92a9ce2b6cc910b8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68443) authorize /opt/stack/nova/nova/policy.py:203}} [ 1701.110678] env[68443]: DEBUG nova.compute.manager [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Start spawning the instance on the hypervisor. 
{{(pid=68443) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1701.133797] env[68443]: DEBUG nova.virt.hardware [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-08T18:48:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-08T18:48:12Z,direct_url=,disk_format='vmdk',id=a80a8b97-4d56-4702-9b02-9d115fcd6710,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f4f2a1e220914ec3b281775c224df247',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-08T18:48:13Z,virtual_size=,visibility=), allow threads: False {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1701.134086] env[68443]: DEBUG nova.virt.hardware [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Flavor limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1701.134254] env[68443]: DEBUG nova.virt.hardware [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Image limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1701.134495] env[68443]: DEBUG nova.virt.hardware [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Flavor pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1701.134574] env[68443]: DEBUG nova.virt.hardware [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Image pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1701.134723] env[68443]: DEBUG nova.virt.hardware [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1701.135011] env[68443]: DEBUG nova.virt.hardware [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1701.135767] env[68443]: DEBUG nova.virt.hardware [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1701.136083] 
env[68443]: DEBUG nova.virt.hardware [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Got 1 possible topologies {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1701.136333] env[68443]: DEBUG nova.virt.hardware [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1701.136591] env[68443]: DEBUG nova.virt.hardware [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1701.137754] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb71bc66-91c3-452b-a086-d19b34928944 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.145983] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-233a501b-d8e1-424c-92fa-6f578e62bc06 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.357940] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3374038, 'name': CreateVM_Task, 'duration_secs': 0.303402} completed successfully. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1701.358388] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Created VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1701.358855] env[68443]: DEBUG oslo_concurrency.lockutils [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1701.359015] env[68443]: DEBUG oslo_concurrency.lockutils [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1701.359390] env[68443]: DEBUG oslo_concurrency.lockutils [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1701.359642] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3d242b34-a814-4eb6-9296-a04c74feaf77 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.364490] env[68443]: DEBUG 
oslo_vmware.api [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Waiting for the task: (returnval){ [ 1701.364490] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]525b326d-0ae9-65c1-8574-e11c192b29ed" [ 1701.364490] env[68443]: _type = "Task" [ 1701.364490] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1701.372604] env[68443]: DEBUG oslo_vmware.api [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]525b326d-0ae9-65c1-8574-e11c192b29ed, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1701.383780] env[68443]: DEBUG nova.network.neutron [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Successfully created port: 70323f45-b589-44e4-9a7e-fd245af1b5c6 {{(pid=68443) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1701.875918] env[68443]: DEBUG oslo_concurrency.lockutils [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1701.875918] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Processing image a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1701.875918] env[68443]: DEBUG oslo_concurrency.lockutils [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1702.131306] env[68443]: DEBUG nova.compute.manager [req-58d99d1b-d900-461e-830f-764df1d31add req-ce9f822b-7a29-4f3c-a337-d087cb8158db service nova] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Received event network-vif-plugged-70323f45-b589-44e4-9a7e-fd245af1b5c6 {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1702.131559] env[68443]: DEBUG oslo_concurrency.lockutils [req-58d99d1b-d900-461e-830f-764df1d31add req-ce9f822b-7a29-4f3c-a337-d087cb8158db service nova] Acquiring lock "b8c2916e-3b70-42c9-9f85-ee8582c636b8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1702.131770] env[68443]: DEBUG oslo_concurrency.lockutils [req-58d99d1b-d900-461e-830f-764df1d31add req-ce9f822b-7a29-4f3c-a337-d087cb8158db service nova] Lock "b8c2916e-3b70-42c9-9f85-ee8582c636b8-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1702.131912] env[68443]: DEBUG oslo_concurrency.lockutils [req-58d99d1b-d900-461e-830f-764df1d31add req-ce9f822b-7a29-4f3c-a337-d087cb8158db service nova] Lock "b8c2916e-3b70-42c9-9f85-ee8582c636b8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1702.132082] env[68443]: DEBUG nova.compute.manager [req-58d99d1b-d900-461e-830f-764df1d31add req-ce9f822b-7a29-4f3c-a337-d087cb8158db service nova] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] No waiting events found dispatching network-vif-plugged-70323f45-b589-44e4-9a7e-fd245af1b5c6 {{(pid=68443) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1702.132268] env[68443]: WARNING nova.compute.manager [req-58d99d1b-d900-461e-830f-764df1d31add req-ce9f822b-7a29-4f3c-a337-d087cb8158db service nova] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Received unexpected event network-vif-plugged-70323f45-b589-44e4-9a7e-fd245af1b5c6 for instance with vm_state building and task_state spawning. [ 1702.168172] env[68443]: DEBUG nova.network.neutron [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Successfully updated port: 70323f45-b589-44e4-9a7e-fd245af1b5c6 {{(pid=68443) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1702.184020] env[68443]: DEBUG oslo_concurrency.lockutils [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Acquiring lock "refresh_cache-b8c2916e-3b70-42c9-9f85-ee8582c636b8" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1702.184020] env[68443]: DEBUG oslo_concurrency.lockutils [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Acquired lock "refresh_cache-b8c2916e-3b70-42c9-9f85-ee8582c636b8" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1702.184020] env[68443]: DEBUG nova.network.neutron [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Building network info cache for instance {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1702.225963] env[68443]: DEBUG nova.network.neutron [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Instance cache missing network info. 
{{(pid=68443) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1702.387061] env[68443]: DEBUG nova.network.neutron [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Updating instance_info_cache with network_info: [{"id": "70323f45-b589-44e4-9a7e-fd245af1b5c6", "address": "fa:16:3e:11:da:39", "network": {"id": "a8d932ea-6ac8-4faf-bcf3-ce95ba2f4627", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1920559404-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bf69652ce2414eab92a9ce2b6cc910b8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35e463c7-7d78-4d66-8efd-6127b1f3ee17", "external-id": "nsx-vlan-transportzone-175", "segmentation_id": 175, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70323f45-b5", "ovs_interfaceid": "70323f45-b589-44e4-9a7e-fd245af1b5c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1702.397550] env[68443]: DEBUG oslo_concurrency.lockutils [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Releasing lock "refresh_cache-b8c2916e-3b70-42c9-9f85-ee8582c636b8" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1702.397833] env[68443]: DEBUG nova.compute.manager [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Instance network_info: |[{"id": "70323f45-b589-44e4-9a7e-fd245af1b5c6", "address": "fa:16:3e:11:da:39", "network": {"id": "a8d932ea-6ac8-4faf-bcf3-ce95ba2f4627", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1920559404-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bf69652ce2414eab92a9ce2b6cc910b8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35e463c7-7d78-4d66-8efd-6127b1f3ee17", "external-id": "nsx-vlan-transportzone-175", "segmentation_id": 175, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70323f45-b5", "ovs_interfaceid": "70323f45-b589-44e4-9a7e-fd245af1b5c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68443) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1987}} [ 1702.398243] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:11:da:39', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '35e463c7-7d78-4d66-8efd-6127b1f3ee17', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '70323f45-b589-44e4-9a7e-fd245af1b5c6', 'vif_model': 'vmxnet3'}] {{(pid=68443) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1702.405613] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Creating folder: Project (bf69652ce2414eab92a9ce2b6cc910b8). Parent ref: group-v673136. {{(pid=68443) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1702.406118] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1f4df5e8-8888-485d-a4f8-df9ca598aaad {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.416816] env[68443]: INFO nova.virt.vmwareapi.vm_util [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Created folder: Project (bf69652ce2414eab92a9ce2b6cc910b8) in parent group-v673136. [ 1702.417009] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Creating folder: Instances. Parent ref: group-v673224. {{(pid=68443) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1702.417235] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2e2f096a-282b-49e0-a45c-d4c3f0d9e088 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.426690] env[68443]: INFO nova.virt.vmwareapi.vm_util [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Created folder: Instances in parent group-v673224. [ 1702.426918] env[68443]: DEBUG oslo.service.loopingcall [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1702.427105] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Creating VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1702.427308] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1871d507-47b8-40cf-a583-61246e348842 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.445263] env[68443]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1702.445263] env[68443]: value = "task-3374041" [ 1702.445263] env[68443]: _type = "Task" [ 1702.445263] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1702.456937] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3374041, 'name': CreateVM_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1702.956666] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3374041, 'name': CreateVM_Task} progress is 25%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1703.455802] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3374041, 'name': CreateVM_Task, 'duration_secs': 0.666098} completed successfully. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1703.455802] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Created VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1703.456220] env[68443]: DEBUG oslo_concurrency.lockutils [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1703.456344] env[68443]: DEBUG oslo_concurrency.lockutils [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1703.456652] env[68443]: DEBUG oslo_concurrency.lockutils [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1703.456892] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b5b69d7-ebf8-45f9-9635-962f4801b4d6 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.461107] env[68443]: DEBUG oslo_vmware.api [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Waiting for the task: 
(returnval){ [ 1703.461107] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]52178e6e-dc9b-d5c2-9902-c8ddcdc801a3" [ 1703.461107] env[68443]: _type = "Task" [ 1703.461107] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1703.468150] env[68443]: DEBUG oslo_vmware.api [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]52178e6e-dc9b-d5c2-9902-c8ddcdc801a3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1703.971381] env[68443]: DEBUG oslo_concurrency.lockutils [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1703.971633] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Processing image a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1703.971846] env[68443]: DEBUG oslo_concurrency.lockutils [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1704.155461] env[68443]: DEBUG nova.compute.manager [req-51f83b23-565f-4f1f-aa3c-1b75c9e2616c req-98ed9adc-4ddc-48bc-9353-605d5b74e824 service nova] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Received event network-changed-70323f45-b589-44e4-9a7e-fd245af1b5c6 {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1704.155657] env[68443]: DEBUG nova.compute.manager [req-51f83b23-565f-4f1f-aa3c-1b75c9e2616c req-98ed9adc-4ddc-48bc-9353-605d5b74e824 service nova] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Refreshing instance network info cache due to event network-changed-70323f45-b589-44e4-9a7e-fd245af1b5c6. 
{{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1704.155875] env[68443]: DEBUG oslo_concurrency.lockutils [req-51f83b23-565f-4f1f-aa3c-1b75c9e2616c req-98ed9adc-4ddc-48bc-9353-605d5b74e824 service nova] Acquiring lock "refresh_cache-b8c2916e-3b70-42c9-9f85-ee8582c636b8" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1704.156028] env[68443]: DEBUG oslo_concurrency.lockutils [req-51f83b23-565f-4f1f-aa3c-1b75c9e2616c req-98ed9adc-4ddc-48bc-9353-605d5b74e824 service nova] Acquired lock "refresh_cache-b8c2916e-3b70-42c9-9f85-ee8582c636b8" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1704.156195] env[68443]: DEBUG nova.network.neutron [req-51f83b23-565f-4f1f-aa3c-1b75c9e2616c req-98ed9adc-4ddc-48bc-9353-605d5b74e824 service nova] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Refreshing network info cache for port 70323f45-b589-44e4-9a7e-fd245af1b5c6 {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1704.422838] env[68443]: DEBUG nova.network.neutron [req-51f83b23-565f-4f1f-aa3c-1b75c9e2616c req-98ed9adc-4ddc-48bc-9353-605d5b74e824 service nova] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Updated VIF entry in instance network info cache for port 70323f45-b589-44e4-9a7e-fd245af1b5c6. {{(pid=68443) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1704.423209] env[68443]: DEBUG nova.network.neutron [req-51f83b23-565f-4f1f-aa3c-1b75c9e2616c req-98ed9adc-4ddc-48bc-9353-605d5b74e824 service nova] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Updating instance_info_cache with network_info: [{"id": "70323f45-b589-44e4-9a7e-fd245af1b5c6", "address": "fa:16:3e:11:da:39", "network": {"id": "a8d932ea-6ac8-4faf-bcf3-ce95ba2f4627", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1920559404-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bf69652ce2414eab92a9ce2b6cc910b8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35e463c7-7d78-4d66-8efd-6127b1f3ee17", "external-id": "nsx-vlan-transportzone-175", "segmentation_id": 175, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70323f45-b5", "ovs_interfaceid": "70323f45-b589-44e4-9a7e-fd245af1b5c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1704.432265] env[68443]: DEBUG oslo_concurrency.lockutils [req-51f83b23-565f-4f1f-aa3c-1b75c9e2616c req-98ed9adc-4ddc-48bc-9353-605d5b74e824 service nova] Releasing lock "refresh_cache-b8c2916e-3b70-42c9-9f85-ee8582c636b8" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1732.251488] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c67e550c-0161-486c-952c-cb969b4391ab tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Acquiring 
lock "0a9bb99d-8f94-4f26-990e-a57aac09c328" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1733.951616] env[68443]: DEBUG oslo_concurrency.lockutils [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Acquiring lock "6333b256-471f-485d-b099-21fa82349319" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1733.951905] env[68443]: DEBUG oslo_concurrency.lockutils [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Lock "6333b256-471f-485d-b099-21fa82349319" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1736.426048] env[68443]: DEBUG oslo_concurrency.lockutils [None req-38161715-85ae-4ca4-a0e2-94d39f38aae1 tempest-ServerTagsTestJSON-268279256 tempest-ServerTagsTestJSON-268279256-project-member] Acquiring lock "a81ad5e2-d4bb-4ef0-a268-c7012538821d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1736.426832] env[68443]: DEBUG oslo_concurrency.lockutils [None req-38161715-85ae-4ca4-a0e2-94d39f38aae1 tempest-ServerTagsTestJSON-268279256 tempest-ServerTagsTestJSON-268279256-project-member] Lock "a81ad5e2-d4bb-4ef0-a268-c7012538821d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1739.447597] env[68443]: DEBUG oslo_concurrency.lockutils [None req-b6faa615-3e84-4881-89dd-201c2d347d9a tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Acquiring lock "b8c2916e-3b70-42c9-9f85-ee8582c636b8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1748.220961] env[68443]: WARNING oslo_vmware.rw_handles [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1748.220961] env[68443]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1748.220961] env[68443]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1748.220961] env[68443]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1748.220961] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1748.220961] env[68443]: ERROR oslo_vmware.rw_handles response.begin() [ 1748.220961] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 
318, in begin [ 1748.220961] env[68443]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1748.220961] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1748.220961] env[68443]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1748.220961] env[68443]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1748.220961] env[68443]: ERROR oslo_vmware.rw_handles [ 1748.221637] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Downloaded image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to vmware_temp/c81c48d2-25be-460b-a46b-d661e07b7e83/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1748.223344] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Caching image {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1748.223588] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Copying Virtual Disk [datastore1] vmware_temp/c81c48d2-25be-460b-a46b-d661e07b7e83/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk to [datastore1] vmware_temp/c81c48d2-25be-460b-a46b-d661e07b7e83/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk {{(pid=68443) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1748.223863] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dca039dc-a78e-4c82-a0f0-eeef301afc61 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.231907] env[68443]: DEBUG oslo_vmware.api [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Waiting for the task: (returnval){ [ 1748.231907] env[68443]: value = "task-3374042" [ 1748.231907] env[68443]: _type = "Task" [ 1748.231907] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1748.239362] env[68443]: DEBUG oslo_vmware.api [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Task: {'id': task-3374042, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1748.742396] env[68443]: DEBUG oslo_vmware.exceptions [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Fault InvalidArgument not matched. 
{{(pid=68443) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1748.742534] env[68443]: DEBUG oslo_concurrency.lockutils [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1748.743089] env[68443]: ERROR nova.compute.manager [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1748.743089] env[68443]: Faults: ['InvalidArgument'] [ 1748.743089] env[68443]: ERROR nova.compute.manager [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Traceback (most recent call last): [ 1748.743089] env[68443]: ERROR nova.compute.manager [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1748.743089] env[68443]: ERROR nova.compute.manager [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] yield resources [ 1748.743089] env[68443]: ERROR nova.compute.manager [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1748.743089] env[68443]: ERROR nova.compute.manager [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] self.driver.spawn(context, instance, image_meta, [ 1748.743089] env[68443]: ERROR nova.compute.manager [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1748.743089] env[68443]: ERROR nova.compute.manager [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1748.743089] env[68443]: ERROR nova.compute.manager [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1748.743089] env[68443]: ERROR nova.compute.manager [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] self._fetch_image_if_missing(context, vi) [ 1748.743089] env[68443]: ERROR nova.compute.manager [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1748.743479] env[68443]: ERROR nova.compute.manager [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] image_cache(vi, tmp_image_ds_loc) [ 1748.743479] env[68443]: ERROR nova.compute.manager [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1748.743479] env[68443]: ERROR nova.compute.manager [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] vm_util.copy_virtual_disk( [ 1748.743479] env[68443]: ERROR nova.compute.manager [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1748.743479] env[68443]: ERROR nova.compute.manager [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] session._wait_for_task(vmdk_copy_task) [ 1748.743479] env[68443]: ERROR nova.compute.manager [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1748.743479] env[68443]: ERROR nova.compute.manager [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] return self.wait_for_task(task_ref) [ 1748.743479] env[68443]: ERROR nova.compute.manager [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1748.743479] env[68443]: ERROR nova.compute.manager [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] return evt.wait() [ 1748.743479] env[68443]: ERROR nova.compute.manager [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1748.743479] env[68443]: ERROR nova.compute.manager [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] result = hub.switch() [ 1748.743479] env[68443]: ERROR nova.compute.manager [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1748.743479] env[68443]: ERROR nova.compute.manager [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] return self.greenlet.switch() [ 1748.743861] env[68443]: ERROR nova.compute.manager [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1748.743861] env[68443]: ERROR nova.compute.manager [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] self.f(*self.args, **self.kw) [ 1748.743861] env[68443]: ERROR nova.compute.manager [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1748.743861] env[68443]: ERROR nova.compute.manager [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] raise exceptions.translate_fault(task_info.error) [ 1748.743861] env[68443]: ERROR nova.compute.manager [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1748.743861] env[68443]: ERROR nova.compute.manager [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Faults: ['InvalidArgument'] [ 1748.743861] env[68443]: ERROR nova.compute.manager [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] [ 1748.743861] env[68443]: INFO nova.compute.manager [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Terminating instance [ 1748.744969] env[68443]: DEBUG oslo_concurrency.lockutils [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1748.745193] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1748.745429] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-4e69e271-7f78-43c9-8d9e-e0f26e5ab1f2 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.747818] env[68443]: DEBUG nova.compute.manager [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1748.748018] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1748.748745] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c653a2b-c6b8-4347-a14c-1d09891afabd {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.755381] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Unregistering the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1748.755558] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a8474303-ceda-43d6-bc55-c7bbf4084012 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.757689] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1748.757863] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68443) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1748.758802] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-229ca077-9023-471e-bec8-a1e7a0937180 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.764703] env[68443]: DEBUG oslo_vmware.api [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Waiting for the task: (returnval){ [ 1748.764703] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]520c5b7a-5178-bcac-cf36-6fb22a55a098" [ 1748.764703] env[68443]: _type = "Task" [ 1748.764703] env[68443]: } to complete. 
{{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1748.771525] env[68443]: DEBUG oslo_vmware.api [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]520c5b7a-5178-bcac-cf36-6fb22a55a098, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1748.831130] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Unregistered the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1748.831417] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Deleting contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1748.831574] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Deleting the datastore file [datastore1] 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1748.831858] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-faa87ecb-5c5a-4fa8-8fcd-ddcd3521daed {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.838671] env[68443]: DEBUG oslo_vmware.api [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Waiting for the task: (returnval){ [ 1748.838671] env[68443]: value = "task-3374044" [ 1748.838671] env[68443]: _type = "Task" [ 1748.838671] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1748.845932] env[68443]: DEBUG oslo_vmware.api [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Task: {'id': task-3374044, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1749.275626] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] [instance: a4708485-db53-416e-94be-f9a017eb28c4] Preparing fetch location {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1749.276022] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Creating directory with path [datastore1] vmware_temp/60a60fa3-18c3-4be1-b0c7-712b3e8fbc44/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1749.276073] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-70f0163e-d47e-4cdd-b335-84bdb6cf3600 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.287256] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Created directory with path [datastore1] vmware_temp/60a60fa3-18c3-4be1-b0c7-712b3e8fbc44/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1749.287491] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] [instance: a4708485-db53-416e-94be-f9a017eb28c4] Fetch image to [datastore1] vmware_temp/60a60fa3-18c3-4be1-b0c7-712b3e8fbc44/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1749.287698] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] [instance: a4708485-db53-416e-94be-f9a017eb28c4] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to [datastore1] vmware_temp/60a60fa3-18c3-4be1-b0c7-712b3e8fbc44/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1749.288431] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e16a5a1-9bfd-4926-973a-131359fb197e {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.294891] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae3a1c0f-d72a-468b-a093-3e540461ddac {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.303604] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa11f8e3-5853-4d40-816b-e5970934ec26 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.333578] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-e40a3216-ab38-4e2b-b1f4-acaf703b5104 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.338772] env[68443]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-237d5624-1db2-46d2-a43e-0e7dba045cf3 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.347526] env[68443]: DEBUG oslo_vmware.api [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Task: {'id': task-3374044, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.067646} completed successfully. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1749.347771] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Deleted the datastore file {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1749.347957] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Deleted contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1749.348148] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1749.348329] env[68443]: INFO nova.compute.manager [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Took 0.60 seconds to destroy the instance on the hypervisor. 
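The entries above repeatedly show oslo_vmware waiting on vCenter tasks (CreateVM_Task, CopyVirtualDisk_Task, DeleteDatastoreFile_Task) by polling their progress until they either finish or surface a translated fault such as InvalidArgument. As a rough illustration of that polling shape only, here is a minimal, self-contained Python sketch; FakeTask, TaskFailed and POLL_INTERVAL are hypothetical stand-ins and do not reproduce the actual oslo_vmware wait_for_task/_poll_task code.

    import time

    POLL_INTERVAL = 0.5  # seconds between polls (illustrative value only)


    class TaskFailed(Exception):
        """Raised when the backend marks the task as errored."""


    class FakeTask:
        """Stand-in for a vCenter task that finishes after a few polls."""

        def __init__(self, polls_until_done=3, error=None):
            self._polls_left = polls_until_done
            self._error = error

        def poll(self):
            """Return a (state, progress) pair, loosely like TaskInfo."""
            self._polls_left -= 1
            if self._polls_left > 0:
                return "running", max(100 - 25 * self._polls_left, 0)
            if self._error:
                return "error", 100
            return "success", 100


    def wait_for_task(task):
        """Poll until the task reaches a terminal state, raising on error."""
        while True:
            state, progress = task.poll()
            print(f"task progress is {progress}%")
            if state == "success":
                return
            if state == "error":
                raise TaskFailed(task._error)
            time.sleep(POLL_INTERVAL)


    if __name__ == "__main__":
        # Completes, much like task-3374041 above.
        wait_for_task(FakeTask(polls_until_done=3))
        try:
            # Fails, loosely mirroring the InvalidArgument fault raised
            # out of the copy-virtual-disk task in the traceback above.
            wait_for_task(FakeTask(polls_until_done=2,
                                   error="InvalidArgument: fileType"))
        except TaskFailed as exc:
            print(f"spawn aborted: {exc}")

The point of the sketch is only the control flow: progress is reported on every poll, success ends the wait, and an errored task turns into an exception that propagates back into the spawn path, which is what the traceback above shows.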
[ 1749.350439] env[68443]: DEBUG nova.compute.claims [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Aborting claim: {{(pid=68443) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1749.350609] env[68443]: DEBUG oslo_concurrency.lockutils [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1749.350816] env[68443]: DEBUG oslo_concurrency.lockutils [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1749.362020] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] [instance: a4708485-db53-416e-94be-f9a017eb28c4] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1749.414864] env[68443]: DEBUG oslo_vmware.rw_handles [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/60a60fa3-18c3-4be1-b0c7-712b3e8fbc44/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68443) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1749.495413] env[68443]: DEBUG oslo_vmware.rw_handles [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Completed reading data from the image iterator. {{(pid=68443) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1749.495413] env[68443]: DEBUG oslo_vmware.rw_handles [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/60a60fa3-18c3-4be1-b0c7-712b3e8fbc44/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68443) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1749.646273] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-930b47e2-ca8d-4b4f-be16-d28a2b8d5f7f {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.655481] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1690ee76-ec1b-4e8a-8bfd-f087f5bd62ed {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.684564] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74e33a7a-9995-4573-8c22-8abbf9f9f5e2 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.691565] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e115718-6d2a-4326-bf4c-fe73040e6403 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.704346] env[68443]: DEBUG nova.compute.provider_tree [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1749.713320] env[68443]: DEBUG nova.scheduler.client.report [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1749.729879] env[68443]: DEBUG oslo_concurrency.lockutils [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.379s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1749.730446] env[68443]: ERROR nova.compute.manager [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1749.730446] env[68443]: Faults: ['InvalidArgument'] [ 1749.730446] env[68443]: ERROR nova.compute.manager [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Traceback (most recent call last): [ 1749.730446] env[68443]: ERROR nova.compute.manager [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] File 
"/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1749.730446] env[68443]: ERROR nova.compute.manager [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] self.driver.spawn(context, instance, image_meta, [ 1749.730446] env[68443]: ERROR nova.compute.manager [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1749.730446] env[68443]: ERROR nova.compute.manager [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1749.730446] env[68443]: ERROR nova.compute.manager [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1749.730446] env[68443]: ERROR nova.compute.manager [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] self._fetch_image_if_missing(context, vi) [ 1749.730446] env[68443]: ERROR nova.compute.manager [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1749.730446] env[68443]: ERROR nova.compute.manager [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] image_cache(vi, tmp_image_ds_loc) [ 1749.730446] env[68443]: ERROR nova.compute.manager [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1749.730807] env[68443]: ERROR nova.compute.manager [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] vm_util.copy_virtual_disk( [ 1749.730807] env[68443]: ERROR nova.compute.manager [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1749.730807] env[68443]: ERROR nova.compute.manager [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] session._wait_for_task(vmdk_copy_task) [ 1749.730807] env[68443]: ERROR nova.compute.manager [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1749.730807] env[68443]: ERROR nova.compute.manager [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] return self.wait_for_task(task_ref) [ 1749.730807] env[68443]: ERROR nova.compute.manager [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1749.730807] env[68443]: ERROR nova.compute.manager [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] return evt.wait() [ 1749.730807] env[68443]: ERROR nova.compute.manager [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1749.730807] env[68443]: ERROR nova.compute.manager [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] result = hub.switch() [ 1749.730807] env[68443]: ERROR nova.compute.manager [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1749.730807] env[68443]: ERROR nova.compute.manager [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] return self.greenlet.switch() [ 1749.730807] env[68443]: ERROR nova.compute.manager [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1749.730807] env[68443]: ERROR nova.compute.manager [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] self.f(*self.args, **self.kw) [ 1749.731151] 
env[68443]: ERROR nova.compute.manager [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1749.731151] env[68443]: ERROR nova.compute.manager [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] raise exceptions.translate_fault(task_info.error) [ 1749.731151] env[68443]: ERROR nova.compute.manager [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1749.731151] env[68443]: ERROR nova.compute.manager [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Faults: ['InvalidArgument'] [ 1749.731151] env[68443]: ERROR nova.compute.manager [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] [ 1749.731303] env[68443]: DEBUG nova.compute.utils [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] VimFaultException {{(pid=68443) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1749.732648] env[68443]: DEBUG nova.compute.manager [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Build of instance 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e was re-scheduled: A specified parameter was not correct: fileType [ 1749.732648] env[68443]: Faults: ['InvalidArgument'] {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1749.733023] env[68443]: DEBUG nova.compute.manager [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Unplugging VIFs for instance {{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1749.733205] env[68443]: DEBUG nova.compute.manager [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1749.733376] env[68443]: DEBUG nova.compute.manager [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1749.733539] env[68443]: DEBUG nova.network.neutron [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1749.824904] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1750.201553] env[68443]: DEBUG nova.network.neutron [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1750.212924] env[68443]: INFO nova.compute.manager [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Took 0.48 seconds to deallocate network for instance. 
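The claim-abort entries above follow the usual oslo_concurrency pattern visible throughout this log: announce the intent to acquire a named lock, report how long the acquisition waited, run the critical section, then report how long the lock was held on release. Below is a minimal sketch of that waited/held bookkeeping, assuming nothing beyond the Python standard library; timed_lock and abort_instance_claim are invented names for illustration, not the lockutils implementation.

    import threading
    import time
    from contextlib import contextmanager

    _LOCKS = {}


    @contextmanager
    def timed_lock(name, caller):
        """Acquire a named lock and report waited/held durations."""
        lock = _LOCKS.setdefault(name, threading.Lock())
        print(f'Acquiring lock "{name}" by "{caller}"')
        start = time.monotonic()
        lock.acquire()
        waited = time.monotonic() - start
        print(f'Lock "{name}" acquired by "{caller}" :: waited {waited:.3f}s')
        held_start = time.monotonic()
        try:
            yield
        finally:
            lock.release()
            held = time.monotonic() - held_start
            print(f'Lock "{name}" released by "{caller}" :: held {held:.3f}s')


    def abort_instance_claim():
        """Toy critical section standing in for the claim abort above."""
        with timed_lock("compute_resources", "abort_instance_claim"):
            time.sleep(0.05)  # pretend to hand resources back to the tracker


    if __name__ == "__main__":
        abort_instance_claim()

Nothing more is implied than the log already states: the abort of the failed instance's resource claim is serialized on the "compute_resources" lock, and the waited/held figures ("waited 0.000s", "held 0.379s") come from exactly this kind of timing around acquire and release.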
[ 1750.298240] env[68443]: INFO nova.scheduler.client.report [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Deleted allocations for instance 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e [ 1750.319049] env[68443]: DEBUG oslo_concurrency.lockutils [None req-97e80fac-7956-4cf7-b890-47d43d01b0c1 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Lock "76462ea2-393b-4b3a-baf5-e6f3cffa2e5e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 592.804s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1750.320187] env[68443]: DEBUG oslo_concurrency.lockutils [None req-b80fb5e7-d182-492a-89d1-4f8145f154a0 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Lock "76462ea2-393b-4b3a-baf5-e6f3cffa2e5e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 396.356s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1750.320477] env[68443]: DEBUG oslo_concurrency.lockutils [None req-b80fb5e7-d182-492a-89d1-4f8145f154a0 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Acquiring lock "76462ea2-393b-4b3a-baf5-e6f3cffa2e5e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1750.320711] env[68443]: DEBUG oslo_concurrency.lockutils [None req-b80fb5e7-d182-492a-89d1-4f8145f154a0 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Lock "76462ea2-393b-4b3a-baf5-e6f3cffa2e5e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1750.320883] env[68443]: DEBUG oslo_concurrency.lockutils [None req-b80fb5e7-d182-492a-89d1-4f8145f154a0 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Lock "76462ea2-393b-4b3a-baf5-e6f3cffa2e5e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1750.323030] env[68443]: INFO nova.compute.manager [None req-b80fb5e7-d182-492a-89d1-4f8145f154a0 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Terminating instance [ 1750.324877] env[68443]: DEBUG nova.compute.manager [None req-b80fb5e7-d182-492a-89d1-4f8145f154a0 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Start destroying the instance on the hypervisor. 
{{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1750.325092] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-b80fb5e7-d182-492a-89d1-4f8145f154a0 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1750.325723] env[68443]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d8ee247e-6dfd-46fb-9ac5-273cc29cfbb0 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.336313] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c15900e-7607-4139-9357-c567205a42d7 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.348610] env[68443]: DEBUG nova.compute.manager [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1750.369305] env[68443]: WARNING nova.virt.vmwareapi.vmops [None req-b80fb5e7-d182-492a-89d1-4f8145f154a0 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e could not be found. [ 1750.369496] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-b80fb5e7-d182-492a-89d1-4f8145f154a0 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1750.369668] env[68443]: INFO nova.compute.manager [None req-b80fb5e7-d182-492a-89d1-4f8145f154a0 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1750.369911] env[68443]: DEBUG oslo.service.loopingcall [None req-b80fb5e7-d182-492a-89d1-4f8145f154a0 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1750.370164] env[68443]: DEBUG nova.compute.manager [-] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1750.370265] env[68443]: DEBUG nova.network.neutron [-] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1750.394887] env[68443]: DEBUG oslo_concurrency.lockutils [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1750.395138] env[68443]: DEBUG oslo_concurrency.lockutils [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1750.396524] env[68443]: INFO nova.compute.claims [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1750.399872] env[68443]: DEBUG nova.network.neutron [-] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1750.407370] env[68443]: INFO nova.compute.manager [-] [instance: 76462ea2-393b-4b3a-baf5-e6f3cffa2e5e] Took 0.04 seconds to deallocate network for instance. 
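The terminate path above hands network cleanup to a looping call named _deallocate_network_with_retries, i.e. the deallocation is retried rather than failed outright if Neutron is momentarily unreachable. The sketch below shows only that retry-until-success shape; NeutronUnavailable, make_flaky_deallocate and deallocate_with_retries are hypothetical helpers written for this illustration and are not Nova's implementation.

    import time


    class NeutronUnavailable(Exception):
        """Hypothetical transient error raised while deallocating ports."""


    def make_flaky_deallocate(failures_before_success=2):
        """Return a deallocate function that fails a few times first."""
        state = {"calls": 0}

        def deallocate(instance_uuid):
            state["calls"] += 1
            if state["calls"] <= failures_before_success:
                raise NeutronUnavailable("neutron not reachable")
            print(f"deallocated network for instance {instance_uuid}")

        return deallocate


    def deallocate_with_retries(deallocate, instance_uuid,
                                max_retries=5, interval=0.1):
        """Call deallocate until it succeeds or the retry budget runs out."""
        for attempt in range(1, max_retries + 1):
            try:
                deallocate(instance_uuid)
                return
            except NeutronUnavailable as exc:
                print(f"attempt {attempt} failed: {exc}; retrying in {interval}s")
                time.sleep(interval)
        raise RuntimeError("giving up on network deallocation")


    if __name__ == "__main__":
        deallocate_with_retries(make_flaky_deallocate(),
                                "76462ea2-393b-4b3a-baf5-e6f3cffa2e5e")

In the log the happy path is taken immediately ("Took 0.04 seconds to deallocate network for instance"), so the retry loop never has to spin; the sketch simply makes explicit what the "with retries" wrapper is there for.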
[ 1750.505490] env[68443]: DEBUG oslo_concurrency.lockutils [None req-b80fb5e7-d182-492a-89d1-4f8145f154a0 tempest-ServerAddressesNegativeTestJSON-1759810339 tempest-ServerAddressesNegativeTestJSON-1759810339-project-member] Lock "76462ea2-393b-4b3a-baf5-e6f3cffa2e5e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.185s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1750.629451] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91688095-0893-457a-b311-ad2bd8cae106 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.637378] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65c7c07f-0935-45d2-a147-639873b2aa31 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.667797] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e114e9e4-3d65-45e0-89b7-2700dfd4492b {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.675216] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-030d32eb-70b2-4f7c-ae5e-01e3966d8578 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.689882] env[68443]: DEBUG nova.compute.provider_tree [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1750.699998] env[68443]: DEBUG nova.scheduler.client.report [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1750.714476] env[68443]: DEBUG oslo_concurrency.lockutils [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.319s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1750.714979] env[68443]: DEBUG nova.compute.manager [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Start building networks asynchronously for instance. 
{{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1750.748762] env[68443]: DEBUG nova.compute.utils [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Using /dev/sd instead of None {{(pid=68443) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1750.751028] env[68443]: DEBUG nova.compute.manager [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Not allocating networking since 'none' was specified. {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1968}} [ 1750.762833] env[68443]: DEBUG nova.compute.manager [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Start building block device mappings for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1750.824675] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1750.826831] env[68443]: DEBUG nova.compute.manager [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Start spawning the instance on the hypervisor. {{(pid=68443) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1750.853024] env[68443]: DEBUG nova.virt.hardware [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-08T18:48:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-08T18:48:12Z,direct_url=,disk_format='vmdk',id=a80a8b97-4d56-4702-9b02-9d115fcd6710,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f4f2a1e220914ec3b281775c224df247',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-08T18:48:13Z,virtual_size=,visibility=), allow threads: False {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1750.853024] env[68443]: DEBUG nova.virt.hardware [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Flavor limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1750.853211] env[68443]: DEBUG nova.virt.hardware [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Image limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1750.853317] env[68443]: DEBUG nova.virt.hardware [None 
req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Flavor pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1750.853466] env[68443]: DEBUG nova.virt.hardware [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Image pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1750.853670] env[68443]: DEBUG nova.virt.hardware [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1750.853907] env[68443]: DEBUG nova.virt.hardware [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1750.854082] env[68443]: DEBUG nova.virt.hardware [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1750.854251] env[68443]: DEBUG nova.virt.hardware [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Got 1 possible topologies {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1750.854413] env[68443]: DEBUG nova.virt.hardware [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1750.854588] env[68443]: DEBUG nova.virt.hardware [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1750.855482] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63a9547b-5c02-4b55-8f3d-7fab4088e778 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.864185] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dc2037b-6627-4024-bb87-7235011de7e1 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.878049] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Instance VIF info [] {{(pid=68443) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1750.883676] env[68443]: DEBUG 
nova.virt.vmwareapi.vm_util [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Creating folder: Project (acc95ea2e5c647f39f9ba532745c3422). Parent ref: group-v673136. {{(pid=68443) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1750.884013] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e430d54e-500e-4ef2-bb36-ad23de4b0fcf {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.894821] env[68443]: INFO nova.virt.vmwareapi.vm_util [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Created folder: Project (acc95ea2e5c647f39f9ba532745c3422) in parent group-v673136. [ 1750.895174] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Creating folder: Instances. Parent ref: group-v673227. {{(pid=68443) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1750.895916] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7b703b0e-522e-43a3-aedb-1a0fa3605002 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.905605] env[68443]: INFO nova.virt.vmwareapi.vm_util [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Created folder: Instances in parent group-v673227. [ 1750.905968] env[68443]: DEBUG oslo.service.loopingcall [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1750.906271] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Creating VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1750.906564] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-65e8f8f2-12f7-4c67-8dcd-8010ff8fe830 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.931366] env[68443]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1750.931366] env[68443]: value = "task-3374047" [ 1750.931366] env[68443]: _type = "Task" [ 1750.931366] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1750.941538] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3374047, 'name': CreateVM_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1751.442024] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3374047, 'name': CreateVM_Task} progress is 99%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1751.942382] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3374047, 'name': CreateVM_Task, 'duration_secs': 0.582074} completed successfully. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1751.942556] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Created VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1751.942997] env[68443]: DEBUG oslo_concurrency.lockutils [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1751.943176] env[68443]: DEBUG oslo_concurrency.lockutils [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1751.943530] env[68443]: DEBUG oslo_concurrency.lockutils [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1751.943763] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9087346d-a2c7-4dda-ae5f-4c0f3436d406 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.948651] env[68443]: DEBUG oslo_vmware.api [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Waiting for the task: (returnval){ [ 1751.948651] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]52dde7e1-ddf0-d01e-f614-b98708188574" [ 1751.948651] env[68443]: _type = "Task" [ 1751.948651] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1751.957031] env[68443]: DEBUG oslo_vmware.api [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]52dde7e1-ddf0-d01e-f614-b98708188574, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1752.459531] env[68443]: DEBUG oslo_concurrency.lockutils [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1752.459879] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Processing image a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1752.459974] env[68443]: DEBUG oslo_concurrency.lockutils [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1753.825649] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1753.825946] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1753.837174] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1753.837423] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1753.837708] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1753.838197] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68443) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1753.839441] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76e4f19e-64d2-4a8b-94bd-a829348a4488 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1753.849030] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-318d2b26-8fd2-46b6-a1cd-cb66ad504eb3 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.864596] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92bf3f96-1f56-4d00-a51e-58b5a55efe69 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.870378] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ace0b880-0dbf-4681-b424-ad63d27fa71d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.899559] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180982MB free_disk=105GB free_vcpus=48 pci_devices=None {{(pid=68443) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1753.899748] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1753.899933] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1753.983556] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance a4708485-db53-416e-94be-f9a017eb28c4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1753.983741] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 18bae6a1-3bd0-4749-8795-5b8ccd18193f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1753.983855] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 99b16cd5-beb0-4f71-8011-411b84ddf497 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1753.983978] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1753.984118] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 7366efe5-c640-4689-97a1-fba0ac431b12 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1753.984237] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 04fc458a-a928-43ef-8fd0-bfc49989d2b1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1753.984356] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 2985403d-348f-473d-ad1f-75fb67d3be12 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1753.984471] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 0a9bb99d-8f94-4f26-990e-a57aac09c328 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1753.984588] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance b8c2916e-3b70-42c9-9f85-ee8582c636b8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1753.984702] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 062710e8-2ccb-4926-97ce-bf6a9fa4d10c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1753.997009] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 963d7f65-a761-4ce1-b6d1-fc987c3111c0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1754.007947] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 12b39079-051e-4997-9fa1-7e467af04306 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1754.018515] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance cf4f7ce7-55d5-4f5a-b19d-7674ee1969b5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1754.036069] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 6333b256-471f-485d-b099-21fa82349319 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1754.046838] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance a81ad5e2-d4bb-4ef0-a268-c7012538821d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1754.047085] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1754.047235] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1754.230099] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21491a2e-8e12-453b-87bf-3355f26387a9 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.237683] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7bb4924-b85c-45c0-8466-b7822747a73a {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.267896] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6749b0bf-17ff-441b-ae1a-a2d217018301 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.274832] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-339c0a76-4e3a-4e41-a5ac-5d7b1d8d50bb {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.287507] env[68443]: DEBUG nova.compute.provider_tree [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Inventory has not changed in ProviderTree for provider: 
feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1754.295790] env[68443]: DEBUG nova.scheduler.client.report [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1754.309442] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68443) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1754.309651] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.410s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1755.309199] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1755.309553] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Starting heal instance info cache {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1755.309599] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Rebuilding the list of instances to heal {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1755.330952] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: a4708485-db53-416e-94be-f9a017eb28c4] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1755.331194] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1755.331327] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1755.331526] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Skipping network cache update for instance because it is Building. 
{{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1755.331691] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1755.331976] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1755.332202] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1755.332356] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1755.332519] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1755.333060] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1755.333312] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Didn't find any instances for network info cache update. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1755.333995] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1755.334239] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1755.334406] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68443) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1757.845945] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1759.824542] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1761.821128] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1799.006699] env[68443]: WARNING oslo_vmware.rw_handles [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1799.006699] env[68443]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1799.006699] env[68443]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1799.006699] env[68443]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1799.006699] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1799.006699] env[68443]: ERROR oslo_vmware.rw_handles response.begin() [ 1799.006699] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1799.006699] env[68443]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1799.006699] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1799.006699] env[68443]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1799.006699] env[68443]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1799.006699] env[68443]: ERROR oslo_vmware.rw_handles [ 1799.007478] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] [instance: a4708485-db53-416e-94be-f9a017eb28c4] Downloaded image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to vmware_temp/60a60fa3-18c3-4be1-b0c7-712b3e8fbc44/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1799.009197] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] [instance: a4708485-db53-416e-94be-f9a017eb28c4] Caching image {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1799.009442] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 
tempest-ServerActionsTestOtherA-1089406008-project-member] Copying Virtual Disk [datastore1] vmware_temp/60a60fa3-18c3-4be1-b0c7-712b3e8fbc44/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk to [datastore1] vmware_temp/60a60fa3-18c3-4be1-b0c7-712b3e8fbc44/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk {{(pid=68443) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1799.009725] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6f3f5274-bef1-4ce1-8ae4-6d71371debc6 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.017456] env[68443]: DEBUG oslo_vmware.api [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Waiting for the task: (returnval){ [ 1799.017456] env[68443]: value = "task-3374048" [ 1799.017456] env[68443]: _type = "Task" [ 1799.017456] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1799.025122] env[68443]: DEBUG oslo_vmware.api [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Task: {'id': task-3374048, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1799.527516] env[68443]: DEBUG oslo_vmware.exceptions [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Fault InvalidArgument not matched. 
{{(pid=68443) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1799.527799] env[68443]: DEBUG oslo_concurrency.lockutils [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1799.528353] env[68443]: ERROR nova.compute.manager [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] [instance: a4708485-db53-416e-94be-f9a017eb28c4] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1799.528353] env[68443]: Faults: ['InvalidArgument'] [ 1799.528353] env[68443]: ERROR nova.compute.manager [instance: a4708485-db53-416e-94be-f9a017eb28c4] Traceback (most recent call last): [ 1799.528353] env[68443]: ERROR nova.compute.manager [instance: a4708485-db53-416e-94be-f9a017eb28c4] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1799.528353] env[68443]: ERROR nova.compute.manager [instance: a4708485-db53-416e-94be-f9a017eb28c4] yield resources [ 1799.528353] env[68443]: ERROR nova.compute.manager [instance: a4708485-db53-416e-94be-f9a017eb28c4] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1799.528353] env[68443]: ERROR nova.compute.manager [instance: a4708485-db53-416e-94be-f9a017eb28c4] self.driver.spawn(context, instance, image_meta, [ 1799.528353] env[68443]: ERROR nova.compute.manager [instance: a4708485-db53-416e-94be-f9a017eb28c4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1799.528353] env[68443]: ERROR nova.compute.manager [instance: a4708485-db53-416e-94be-f9a017eb28c4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1799.528353] env[68443]: ERROR nova.compute.manager [instance: a4708485-db53-416e-94be-f9a017eb28c4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1799.528353] env[68443]: ERROR nova.compute.manager [instance: a4708485-db53-416e-94be-f9a017eb28c4] self._fetch_image_if_missing(context, vi) [ 1799.528353] env[68443]: ERROR nova.compute.manager [instance: a4708485-db53-416e-94be-f9a017eb28c4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1799.528752] env[68443]: ERROR nova.compute.manager [instance: a4708485-db53-416e-94be-f9a017eb28c4] image_cache(vi, tmp_image_ds_loc) [ 1799.528752] env[68443]: ERROR nova.compute.manager [instance: a4708485-db53-416e-94be-f9a017eb28c4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1799.528752] env[68443]: ERROR nova.compute.manager [instance: a4708485-db53-416e-94be-f9a017eb28c4] vm_util.copy_virtual_disk( [ 1799.528752] env[68443]: ERROR nova.compute.manager [instance: a4708485-db53-416e-94be-f9a017eb28c4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1799.528752] env[68443]: ERROR nova.compute.manager [instance: a4708485-db53-416e-94be-f9a017eb28c4] session._wait_for_task(vmdk_copy_task) [ 1799.528752] env[68443]: ERROR nova.compute.manager [instance: a4708485-db53-416e-94be-f9a017eb28c4] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1799.528752] env[68443]: ERROR nova.compute.manager [instance: a4708485-db53-416e-94be-f9a017eb28c4] return self.wait_for_task(task_ref) [ 1799.528752] env[68443]: ERROR nova.compute.manager [instance: a4708485-db53-416e-94be-f9a017eb28c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1799.528752] env[68443]: ERROR nova.compute.manager [instance: a4708485-db53-416e-94be-f9a017eb28c4] return evt.wait() [ 1799.528752] env[68443]: ERROR nova.compute.manager [instance: a4708485-db53-416e-94be-f9a017eb28c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1799.528752] env[68443]: ERROR nova.compute.manager [instance: a4708485-db53-416e-94be-f9a017eb28c4] result = hub.switch() [ 1799.528752] env[68443]: ERROR nova.compute.manager [instance: a4708485-db53-416e-94be-f9a017eb28c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1799.528752] env[68443]: ERROR nova.compute.manager [instance: a4708485-db53-416e-94be-f9a017eb28c4] return self.greenlet.switch() [ 1799.529177] env[68443]: ERROR nova.compute.manager [instance: a4708485-db53-416e-94be-f9a017eb28c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1799.529177] env[68443]: ERROR nova.compute.manager [instance: a4708485-db53-416e-94be-f9a017eb28c4] self.f(*self.args, **self.kw) [ 1799.529177] env[68443]: ERROR nova.compute.manager [instance: a4708485-db53-416e-94be-f9a017eb28c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1799.529177] env[68443]: ERROR nova.compute.manager [instance: a4708485-db53-416e-94be-f9a017eb28c4] raise exceptions.translate_fault(task_info.error) [ 1799.529177] env[68443]: ERROR nova.compute.manager [instance: a4708485-db53-416e-94be-f9a017eb28c4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1799.529177] env[68443]: ERROR nova.compute.manager [instance: a4708485-db53-416e-94be-f9a017eb28c4] Faults: ['InvalidArgument'] [ 1799.529177] env[68443]: ERROR nova.compute.manager [instance: a4708485-db53-416e-94be-f9a017eb28c4] [ 1799.529177] env[68443]: INFO nova.compute.manager [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] [instance: a4708485-db53-416e-94be-f9a017eb28c4] Terminating instance [ 1799.530269] env[68443]: DEBUG oslo_concurrency.lockutils [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1799.530473] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1799.530712] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1b8b996d-eaee-446f-b146-e19ff15f42cd 
{{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.532875] env[68443]: DEBUG nova.compute.manager [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] [instance: a4708485-db53-416e-94be-f9a017eb28c4] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1799.533081] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] [instance: a4708485-db53-416e-94be-f9a017eb28c4] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1799.533953] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8af9c1dc-eafa-45d7-b2e9-db5a7b2f27c5 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.541698] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] [instance: a4708485-db53-416e-94be-f9a017eb28c4] Unregistering the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1799.541930] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b733e3e3-f564-4afa-97c0-9ef0fdeceb3a {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.544062] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1799.544235] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68443) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1799.545190] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a4d2c5d9-2bef-40ea-be26-5887803e7089 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.550026] env[68443]: DEBUG oslo_vmware.api [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Waiting for the task: (returnval){ [ 1799.550026] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]521a2f5e-4ca8-3d9e-145f-55c434bd61dd" [ 1799.550026] env[68443]: _type = "Task" [ 1799.550026] env[68443]: } to complete. 
{{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1799.559442] env[68443]: DEBUG oslo_vmware.api [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]521a2f5e-4ca8-3d9e-145f-55c434bd61dd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1799.615327] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] [instance: a4708485-db53-416e-94be-f9a017eb28c4] Unregistered the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1799.615540] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] [instance: a4708485-db53-416e-94be-f9a017eb28c4] Deleting contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1799.615722] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Deleting the datastore file [datastore1] a4708485-db53-416e-94be-f9a017eb28c4 {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1799.615989] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-739cd8bd-8dc5-46a5-b163-b6d272937d6f {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.621697] env[68443]: DEBUG oslo_vmware.api [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Waiting for the task: (returnval){ [ 1799.621697] env[68443]: value = "task-3374050" [ 1799.621697] env[68443]: _type = "Task" [ 1799.621697] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1799.629420] env[68443]: DEBUG oslo_vmware.api [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Task: {'id': task-3374050, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1800.060495] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Preparing fetch location {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1800.060862] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Creating directory with path [datastore1] vmware_temp/dbfe7bbf-fbc9-4759-9124-11bb74a245a5/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1800.060993] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0c431dff-d7b4-47cf-8f03-20930e68605c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.072072] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Created directory with path [datastore1] vmware_temp/dbfe7bbf-fbc9-4759-9124-11bb74a245a5/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1800.072257] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Fetch image to [datastore1] vmware_temp/dbfe7bbf-fbc9-4759-9124-11bb74a245a5/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1800.072426] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to [datastore1] vmware_temp/dbfe7bbf-fbc9-4759-9124-11bb74a245a5/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1800.073124] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89b2b057-da91-43ce-b39b-a9e34f3d824d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.079251] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3015501a-96a8-4e6f-ba3d-7530c9c79ccd {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.087834] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-996c2305-d1ce-46ba-9a54-7641f431ae6b {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.117278] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b16b6dd5-6359-4d0d-b013-43f95e31cee7 {{(pid=68443) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.125060] env[68443]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-76b05207-6d94-49b5-b8d7-9ef3d0e09384 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.130749] env[68443]: DEBUG oslo_vmware.api [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Task: {'id': task-3374050, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.066918} completed successfully. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1800.130976] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Deleted the datastore file {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1800.131161] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] [instance: a4708485-db53-416e-94be-f9a017eb28c4] Deleted contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1800.131343] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] [instance: a4708485-db53-416e-94be-f9a017eb28c4] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1800.131512] env[68443]: INFO nova.compute.manager [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] [instance: a4708485-db53-416e-94be-f9a017eb28c4] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1800.133572] env[68443]: DEBUG nova.compute.claims [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] [instance: a4708485-db53-416e-94be-f9a017eb28c4] Aborting claim: {{(pid=68443) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1800.133740] env[68443]: DEBUG oslo_concurrency.lockutils [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1800.133958] env[68443]: DEBUG oslo_concurrency.lockutils [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1800.146520] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1800.197045] env[68443]: DEBUG oslo_vmware.rw_handles [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/dbfe7bbf-fbc9-4759-9124-11bb74a245a5/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68443) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1800.257038] env[68443]: DEBUG oslo_vmware.rw_handles [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Completed reading data from the image iterator. {{(pid=68443) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1800.257232] env[68443]: DEBUG oslo_vmware.rw_handles [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/dbfe7bbf-fbc9-4759-9124-11bb74a245a5/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68443) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1800.392736] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e55e6a13-0a7e-4138-ab25-4bbb19ad6257 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.400109] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-772291f1-dba3-45b7-9a27-caccb26285c4 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.430962] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09401914-6a2a-4aa0-b606-0bb706fc3799 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.438147] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4300449f-d526-4286-af1d-2d0ade2e50d5 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.450716] env[68443]: DEBUG nova.compute.provider_tree [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1800.459038] env[68443]: DEBUG nova.scheduler.client.report [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1800.474859] env[68443]: DEBUG oslo_concurrency.lockutils [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.341s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1800.475384] env[68443]: ERROR nova.compute.manager [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] [instance: a4708485-db53-416e-94be-f9a017eb28c4] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1800.475384] env[68443]: Faults: ['InvalidArgument'] [ 1800.475384] env[68443]: ERROR nova.compute.manager [instance: a4708485-db53-416e-94be-f9a017eb28c4] Traceback (most recent call last): [ 1800.475384] env[68443]: ERROR nova.compute.manager [instance: a4708485-db53-416e-94be-f9a017eb28c4] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1800.475384] 
env[68443]: ERROR nova.compute.manager [instance: a4708485-db53-416e-94be-f9a017eb28c4] self.driver.spawn(context, instance, image_meta, [ 1800.475384] env[68443]: ERROR nova.compute.manager [instance: a4708485-db53-416e-94be-f9a017eb28c4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1800.475384] env[68443]: ERROR nova.compute.manager [instance: a4708485-db53-416e-94be-f9a017eb28c4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1800.475384] env[68443]: ERROR nova.compute.manager [instance: a4708485-db53-416e-94be-f9a017eb28c4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1800.475384] env[68443]: ERROR nova.compute.manager [instance: a4708485-db53-416e-94be-f9a017eb28c4] self._fetch_image_if_missing(context, vi) [ 1800.475384] env[68443]: ERROR nova.compute.manager [instance: a4708485-db53-416e-94be-f9a017eb28c4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1800.475384] env[68443]: ERROR nova.compute.manager [instance: a4708485-db53-416e-94be-f9a017eb28c4] image_cache(vi, tmp_image_ds_loc) [ 1800.475384] env[68443]: ERROR nova.compute.manager [instance: a4708485-db53-416e-94be-f9a017eb28c4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1800.475722] env[68443]: ERROR nova.compute.manager [instance: a4708485-db53-416e-94be-f9a017eb28c4] vm_util.copy_virtual_disk( [ 1800.475722] env[68443]: ERROR nova.compute.manager [instance: a4708485-db53-416e-94be-f9a017eb28c4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1800.475722] env[68443]: ERROR nova.compute.manager [instance: a4708485-db53-416e-94be-f9a017eb28c4] session._wait_for_task(vmdk_copy_task) [ 1800.475722] env[68443]: ERROR nova.compute.manager [instance: a4708485-db53-416e-94be-f9a017eb28c4] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1800.475722] env[68443]: ERROR nova.compute.manager [instance: a4708485-db53-416e-94be-f9a017eb28c4] return self.wait_for_task(task_ref) [ 1800.475722] env[68443]: ERROR nova.compute.manager [instance: a4708485-db53-416e-94be-f9a017eb28c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1800.475722] env[68443]: ERROR nova.compute.manager [instance: a4708485-db53-416e-94be-f9a017eb28c4] return evt.wait() [ 1800.475722] env[68443]: ERROR nova.compute.manager [instance: a4708485-db53-416e-94be-f9a017eb28c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1800.475722] env[68443]: ERROR nova.compute.manager [instance: a4708485-db53-416e-94be-f9a017eb28c4] result = hub.switch() [ 1800.475722] env[68443]: ERROR nova.compute.manager [instance: a4708485-db53-416e-94be-f9a017eb28c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1800.475722] env[68443]: ERROR nova.compute.manager [instance: a4708485-db53-416e-94be-f9a017eb28c4] return self.greenlet.switch() [ 1800.475722] env[68443]: ERROR nova.compute.manager [instance: a4708485-db53-416e-94be-f9a017eb28c4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1800.475722] env[68443]: ERROR nova.compute.manager [instance: a4708485-db53-416e-94be-f9a017eb28c4] self.f(*self.args, **self.kw) [ 1800.476255] env[68443]: ERROR nova.compute.manager [instance: a4708485-db53-416e-94be-f9a017eb28c4] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1800.476255] env[68443]: ERROR nova.compute.manager [instance: a4708485-db53-416e-94be-f9a017eb28c4] raise exceptions.translate_fault(task_info.error) [ 1800.476255] env[68443]: ERROR nova.compute.manager [instance: a4708485-db53-416e-94be-f9a017eb28c4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1800.476255] env[68443]: ERROR nova.compute.manager [instance: a4708485-db53-416e-94be-f9a017eb28c4] Faults: ['InvalidArgument'] [ 1800.476255] env[68443]: ERROR nova.compute.manager [instance: a4708485-db53-416e-94be-f9a017eb28c4] [ 1800.476255] env[68443]: DEBUG nova.compute.utils [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] [instance: a4708485-db53-416e-94be-f9a017eb28c4] VimFaultException {{(pid=68443) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1800.477679] env[68443]: DEBUG nova.compute.manager [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] [instance: a4708485-db53-416e-94be-f9a017eb28c4] Build of instance a4708485-db53-416e-94be-f9a017eb28c4 was re-scheduled: A specified parameter was not correct: fileType [ 1800.477679] env[68443]: Faults: ['InvalidArgument'] {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1800.478052] env[68443]: DEBUG nova.compute.manager [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] [instance: a4708485-db53-416e-94be-f9a017eb28c4] Unplugging VIFs for instance {{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1800.478414] env[68443]: DEBUG nova.compute.manager [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1800.478414] env[68443]: DEBUG nova.compute.manager [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] [instance: a4708485-db53-416e-94be-f9a017eb28c4] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1800.478536] env[68443]: DEBUG nova.network.neutron [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] [instance: a4708485-db53-416e-94be-f9a017eb28c4] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1800.804123] env[68443]: DEBUG nova.network.neutron [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] [instance: a4708485-db53-416e-94be-f9a017eb28c4] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1800.831156] env[68443]: INFO nova.compute.manager [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] [instance: a4708485-db53-416e-94be-f9a017eb28c4] Took 0.35 seconds to deallocate network for instance. [ 1800.934196] env[68443]: INFO nova.scheduler.client.report [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Deleted allocations for instance a4708485-db53-416e-94be-f9a017eb28c4 [ 1800.961078] env[68443]: DEBUG oslo_concurrency.lockutils [None req-ad18cb23-d9aa-46a1-a27c-e78a8952de47 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Lock "a4708485-db53-416e-94be-f9a017eb28c4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 587.288s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1800.962327] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4ac13cc1-72b4-4092-a436-f050df7799a5 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Lock "a4708485-db53-416e-94be-f9a017eb28c4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 391.643s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1800.962545] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4ac13cc1-72b4-4092-a436-f050df7799a5 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Acquiring lock "a4708485-db53-416e-94be-f9a017eb28c4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1800.962813] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4ac13cc1-72b4-4092-a436-f050df7799a5 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Lock "a4708485-db53-416e-94be-f9a017eb28c4-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1800.962930] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4ac13cc1-72b4-4092-a436-f050df7799a5 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Lock "a4708485-db53-416e-94be-f9a017eb28c4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1800.965431] env[68443]: INFO nova.compute.manager [None req-4ac13cc1-72b4-4092-a436-f050df7799a5 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] [instance: a4708485-db53-416e-94be-f9a017eb28c4] Terminating instance [ 1800.967041] env[68443]: DEBUG nova.compute.manager [None req-4ac13cc1-72b4-4092-a436-f050df7799a5 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] [instance: a4708485-db53-416e-94be-f9a017eb28c4] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1800.967705] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-4ac13cc1-72b4-4092-a436-f050df7799a5 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] [instance: a4708485-db53-416e-94be-f9a017eb28c4] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1800.967705] env[68443]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-300124c9-9679-4c82-a885-9dcd0ada652b {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.977018] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ba90467-e7a3-4654-9aa0-f38848ac2ce9 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.988649] env[68443]: DEBUG nova.compute.manager [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1801.011361] env[68443]: WARNING nova.virt.vmwareapi.vmops [None req-4ac13cc1-72b4-4092-a436-f050df7799a5 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] [instance: a4708485-db53-416e-94be-f9a017eb28c4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a4708485-db53-416e-94be-f9a017eb28c4 could not be found. 
[ 1801.011585] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-4ac13cc1-72b4-4092-a436-f050df7799a5 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] [instance: a4708485-db53-416e-94be-f9a017eb28c4] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1801.011772] env[68443]: INFO nova.compute.manager [None req-4ac13cc1-72b4-4092-a436-f050df7799a5 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] [instance: a4708485-db53-416e-94be-f9a017eb28c4] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1801.011992] env[68443]: DEBUG oslo.service.loopingcall [None req-4ac13cc1-72b4-4092-a436-f050df7799a5 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1801.012567] env[68443]: DEBUG nova.compute.manager [-] [instance: a4708485-db53-416e-94be-f9a017eb28c4] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1801.012567] env[68443]: DEBUG nova.network.neutron [-] [instance: a4708485-db53-416e-94be-f9a017eb28c4] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1801.042454] env[68443]: DEBUG nova.network.neutron [-] [instance: a4708485-db53-416e-94be-f9a017eb28c4] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1801.046435] env[68443]: DEBUG oslo_concurrency.lockutils [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1801.046726] env[68443]: DEBUG oslo_concurrency.lockutils [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1801.048188] env[68443]: INFO nova.compute.claims [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1801.051449] env[68443]: INFO nova.compute.manager [-] [instance: a4708485-db53-416e-94be-f9a017eb28c4] Took 0.04 seconds to deallocate network for instance. 
[ 1801.138306] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4ac13cc1-72b4-4092-a436-f050df7799a5 tempest-ServerActionsTestOtherA-1089406008 tempest-ServerActionsTestOtherA-1089406008-project-member] Lock "a4708485-db53-416e-94be-f9a017eb28c4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.176s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1801.268580] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-593d4c93-d12f-460a-ad04-59e462ea4e2d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.276609] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6e92e40-f44c-4a1f-b925-2b3ba75633b5 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.306768] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1c967a3-e1d7-4b67-9934-9a27e02d0cfe {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.313915] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d44c81b-df5b-4ecf-b826-86036b2f823c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.328442] env[68443]: DEBUG nova.compute.provider_tree [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1801.338325] env[68443]: DEBUG nova.scheduler.client.report [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1801.354293] env[68443]: DEBUG oslo_concurrency.lockutils [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.307s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1801.354902] env[68443]: DEBUG nova.compute.manager [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Start building networks asynchronously for instance. 
{{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1801.391808] env[68443]: DEBUG nova.compute.utils [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Using /dev/sd instead of None {{(pid=68443) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1801.393718] env[68443]: DEBUG nova.compute.manager [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Not allocating networking since 'none' was specified. {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1968}} [ 1801.403124] env[68443]: DEBUG nova.compute.manager [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Start building block device mappings for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1801.465474] env[68443]: DEBUG nova.compute.manager [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Start spawning the instance on the hypervisor. {{(pid=68443) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1801.498558] env[68443]: DEBUG nova.virt.hardware [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-08T18:48:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-08T18:48:12Z,direct_url=,disk_format='vmdk',id=a80a8b97-4d56-4702-9b02-9d115fcd6710,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f4f2a1e220914ec3b281775c224df247',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-08T18:48:13Z,virtual_size=,visibility=), allow threads: False {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1801.498899] env[68443]: DEBUG nova.virt.hardware [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Flavor limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1801.499161] env[68443]: DEBUG nova.virt.hardware [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Image limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1801.499369] env[68443]: DEBUG nova.virt.hardware [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Flavor pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1801.499523] env[68443]: DEBUG nova.virt.hardware [None 
req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Image pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1801.499677] env[68443]: DEBUG nova.virt.hardware [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1801.499884] env[68443]: DEBUG nova.virt.hardware [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1801.500056] env[68443]: DEBUG nova.virt.hardware [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1801.500225] env[68443]: DEBUG nova.virt.hardware [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Got 1 possible topologies {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1801.500424] env[68443]: DEBUG nova.virt.hardware [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1801.500656] env[68443]: DEBUG nova.virt.hardware [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1801.501596] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f742fc65-c4b9-4347-a492-86ce3b17aa6c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.510265] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e1487ad-31bb-4818-94b0-57c987b87541 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.524481] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Instance VIF info [] {{(pid=68443) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1801.530418] env[68443]: DEBUG oslo.service.loopingcall [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1801.530935] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Creating VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1801.530935] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1e1f9341-90f2-475d-a7e6-f695c67f7aec {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.548511] env[68443]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1801.548511] env[68443]: value = "task-3374051" [ 1801.548511] env[68443]: _type = "Task" [ 1801.548511] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1801.558705] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3374051, 'name': CreateVM_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1802.058587] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3374051, 'name': CreateVM_Task, 'duration_secs': 0.252566} completed successfully. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1802.058753] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Created VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1802.059201] env[68443]: DEBUG oslo_concurrency.lockutils [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1802.059378] env[68443]: DEBUG oslo_concurrency.lockutils [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1802.059719] env[68443]: DEBUG oslo_concurrency.lockutils [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1802.059955] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f4458925-9e30-42f8-989e-8e48429fac2e {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.064227] env[68443]: DEBUG oslo_vmware.api [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Waiting for the task: (returnval){ [ 1802.064227] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]52817ae5-e689-fc14-aebe-4089bb4dcd59" [ 1802.064227] env[68443]: _type = "Task" [ 1802.064227] env[68443]: } to complete. 
{{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1802.071274] env[68443]: DEBUG oslo_vmware.api [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]52817ae5-e689-fc14-aebe-4089bb4dcd59, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1802.539701] env[68443]: DEBUG oslo_concurrency.lockutils [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Acquiring lock "a077b8cb-2ae3-4a4b-b309-ca8dbca0e412" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1802.539701] env[68443]: DEBUG oslo_concurrency.lockutils [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Lock "a077b8cb-2ae3-4a4b-b309-ca8dbca0e412" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1802.575050] env[68443]: DEBUG oslo_concurrency.lockutils [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1802.575050] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Processing image a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1802.575293] env[68443]: DEBUG oslo_concurrency.lockutils [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1804.969577] env[68443]: DEBUG oslo_concurrency.lockutils [None req-d818ae68-d6a3-4cb6-b27f-51c2792df050 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Acquiring lock "963d7f65-a761-4ce1-b6d1-fc987c3111c0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1810.824828] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1810.825114] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 
None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1813.825389] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1813.837544] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1813.837762] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1813.837949] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1813.838175] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68443) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1813.839375] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-517c7cb4-99ad-40c3-aa73-33bc979918b7 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.848479] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52df4b88-7e27-4b4c-934a-a71e886c10a6 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.862152] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50477da3-ca34-425e-a25d-7c90816049c4 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.868167] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea9db99c-4454-457e-b031-c5ae60a7167e {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.896454] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180996MB free_disk=105GB free_vcpus=48 pci_devices=None {{(pid=68443) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1813.896692] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" 
{{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1813.896789] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1813.969903] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 18bae6a1-3bd0-4749-8795-5b8ccd18193f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1813.970082] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 99b16cd5-beb0-4f71-8011-411b84ddf497 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1813.970211] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1813.970334] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 7366efe5-c640-4689-97a1-fba0ac431b12 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1813.970452] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 04fc458a-a928-43ef-8fd0-bfc49989d2b1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1813.970574] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 2985403d-348f-473d-ad1f-75fb67d3be12 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1813.970689] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 0a9bb99d-8f94-4f26-990e-a57aac09c328 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1813.970804] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance b8c2916e-3b70-42c9-9f85-ee8582c636b8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1813.970917] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 062710e8-2ccb-4926-97ce-bf6a9fa4d10c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1813.971040] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 963d7f65-a761-4ce1-b6d1-fc987c3111c0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1813.981983] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 12b39079-051e-4997-9fa1-7e467af04306 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1813.992917] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance cf4f7ce7-55d5-4f5a-b19d-7674ee1969b5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1814.002793] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 6333b256-471f-485d-b099-21fa82349319 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1814.012314] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance a81ad5e2-d4bb-4ef0-a268-c7012538821d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1814.021440] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance a077b8cb-2ae3-4a4b-b309-ca8dbca0e412 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1814.021657] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1814.021804] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1814.187325] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01625bc8-8dae-4fb3-a2e6-d98ab0543ba5 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.195314] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d319e35e-2650-4b1f-8df2-a8a017989c17 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.224508] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33e1faa3-def9-48a8-b505-c90ee4111c48 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.231460] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca3de3e9-da4d-48ca-b384-f5ba2fae782c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.244411] env[68443]: DEBUG nova.compute.provider_tree [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1814.252467] env[68443]: DEBUG nova.scheduler.client.report [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1814.266303] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68443) 
_update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1814.266491] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.370s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1815.266409] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1815.266706] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Starting heal instance info cache {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1815.266752] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Rebuilding the list of instances to heal {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1815.290065] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1815.290295] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1815.290484] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1815.290664] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1815.290838] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1815.291024] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1815.291208] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Skipping network cache update for instance because it is Building. 
{{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1815.291377] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1815.291546] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1815.291714] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1815.291882] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Didn't find any instances for network info cache update. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1815.292591] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1815.292806] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68443) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1815.824708] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1815.825026] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1817.820954] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1821.824286] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1849.166512] env[68443]: WARNING oslo_vmware.rw_handles [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1849.166512] env[68443]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1849.166512] env[68443]: ERROR oslo_vmware.rw_handles File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1849.166512] env[68443]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1849.166512] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1849.166512] env[68443]: ERROR oslo_vmware.rw_handles response.begin() [ 1849.166512] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1849.166512] env[68443]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1849.166512] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1849.166512] env[68443]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1849.166512] env[68443]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1849.166512] env[68443]: ERROR oslo_vmware.rw_handles [ 1849.167656] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Downloaded image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to vmware_temp/dbfe7bbf-fbc9-4759-9124-11bb74a245a5/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1849.169787] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Caching image {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1849.169995] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Copying Virtual Disk [datastore1] vmware_temp/dbfe7bbf-fbc9-4759-9124-11bb74a245a5/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk to [datastore1] vmware_temp/dbfe7bbf-fbc9-4759-9124-11bb74a245a5/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk {{(pid=68443) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1849.170301] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4cfd944d-b82a-470d-98e4-a924bf83b509 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.177914] env[68443]: DEBUG oslo_vmware.api [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Waiting for the task: (returnval){ [ 1849.177914] env[68443]: value = "task-3374052" [ 1849.177914] env[68443]: _type = "Task" [ 1849.177914] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1849.185857] env[68443]: DEBUG oslo_vmware.api [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Task: {'id': task-3374052, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1849.688945] env[68443]: DEBUG oslo_vmware.exceptions [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Fault InvalidArgument not matched. {{(pid=68443) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1849.689268] env[68443]: DEBUG oslo_concurrency.lockutils [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1849.689854] env[68443]: ERROR nova.compute.manager [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1849.689854] env[68443]: Faults: ['InvalidArgument'] [ 1849.689854] env[68443]: ERROR nova.compute.manager [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Traceback (most recent call last): [ 1849.689854] env[68443]: ERROR nova.compute.manager [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1849.689854] env[68443]: ERROR nova.compute.manager [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] yield resources [ 1849.689854] env[68443]: ERROR nova.compute.manager [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1849.689854] env[68443]: ERROR nova.compute.manager [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] self.driver.spawn(context, instance, image_meta, [ 1849.689854] env[68443]: ERROR nova.compute.manager [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1849.689854] env[68443]: ERROR nova.compute.manager [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1849.689854] env[68443]: ERROR nova.compute.manager [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1849.689854] env[68443]: ERROR nova.compute.manager [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] self._fetch_image_if_missing(context, vi) [ 1849.689854] env[68443]: ERROR nova.compute.manager [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1849.690253] env[68443]: ERROR nova.compute.manager [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] image_cache(vi, tmp_image_ds_loc) [ 1849.690253] env[68443]: ERROR nova.compute.manager [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1849.690253] env[68443]: ERROR nova.compute.manager [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] vm_util.copy_virtual_disk( [ 1849.690253] env[68443]: ERROR nova.compute.manager [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] File 
"/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1849.690253] env[68443]: ERROR nova.compute.manager [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] session._wait_for_task(vmdk_copy_task) [ 1849.690253] env[68443]: ERROR nova.compute.manager [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1849.690253] env[68443]: ERROR nova.compute.manager [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] return self.wait_for_task(task_ref) [ 1849.690253] env[68443]: ERROR nova.compute.manager [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1849.690253] env[68443]: ERROR nova.compute.manager [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] return evt.wait() [ 1849.690253] env[68443]: ERROR nova.compute.manager [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1849.690253] env[68443]: ERROR nova.compute.manager [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] result = hub.switch() [ 1849.690253] env[68443]: ERROR nova.compute.manager [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1849.690253] env[68443]: ERROR nova.compute.manager [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] return self.greenlet.switch() [ 1849.690622] env[68443]: ERROR nova.compute.manager [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1849.690622] env[68443]: ERROR nova.compute.manager [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] self.f(*self.args, **self.kw) [ 1849.690622] env[68443]: ERROR nova.compute.manager [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1849.690622] env[68443]: ERROR nova.compute.manager [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] raise exceptions.translate_fault(task_info.error) [ 1849.690622] env[68443]: ERROR nova.compute.manager [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1849.690622] env[68443]: ERROR nova.compute.manager [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Faults: ['InvalidArgument'] [ 1849.690622] env[68443]: ERROR nova.compute.manager [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] [ 1849.690622] env[68443]: INFO nova.compute.manager [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Terminating instance [ 1849.691782] env[68443]: DEBUG oslo_concurrency.lockutils [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1849.692085] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 
tempest-ServerAddressesTestJSON-3972615-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1849.692334] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2d41c8c3-01e8-4162-a594-85fe30fc776a {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.695751] env[68443]: DEBUG nova.compute.manager [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1849.695942] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1849.696699] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5358990-5e96-4346-b25b-1d5dae012a57 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.703302] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Unregistering the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1849.703513] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dc8a6de1-818c-4331-8b89-6df33e7421ba {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.705528] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1849.705704] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68443) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1849.706649] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25b7d969-8c01-4f47-818c-1a0fea6fc341 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.711580] env[68443]: DEBUG oslo_vmware.api [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Waiting for the task: (returnval){ [ 1849.711580] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]522fd570-fd78-cde0-eec3-f164f0a01582" [ 1849.711580] env[68443]: _type = "Task" [ 1849.711580] env[68443]: } to complete. 
{{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1849.718715] env[68443]: DEBUG oslo_vmware.api [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]522fd570-fd78-cde0-eec3-f164f0a01582, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1849.772965] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Unregistered the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1849.773182] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Deleting contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1849.773395] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Deleting the datastore file [datastore1] 18bae6a1-3bd0-4749-8795-5b8ccd18193f {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1849.773692] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c150480e-b6a4-484d-b48e-fb63dcac3f6a {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.779755] env[68443]: DEBUG oslo_vmware.api [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Waiting for the task: (returnval){ [ 1849.779755] env[68443]: value = "task-3374054" [ 1849.779755] env[68443]: _type = "Task" [ 1849.779755] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1849.787035] env[68443]: DEBUG oslo_vmware.api [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Task: {'id': task-3374054, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1850.222321] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Preparing fetch location {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1850.222681] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Creating directory with path [datastore1] vmware_temp/8e51e797-4260-4923-819a-df15a21e0fef/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1850.222788] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-952b1573-5f5a-4287-a8e3-268c7c1579b9 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.233417] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Created directory with path [datastore1] vmware_temp/8e51e797-4260-4923-819a-df15a21e0fef/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1850.233619] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Fetch image to [datastore1] vmware_temp/8e51e797-4260-4923-819a-df15a21e0fef/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1850.233802] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to [datastore1] vmware_temp/8e51e797-4260-4923-819a-df15a21e0fef/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1850.234508] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9037402f-f315-4754-95bc-2d010f22601a {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.240980] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7579653b-766a-4925-9edb-8265d008238f {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.249569] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-013ba7de-47b6-4c63-8866-b179af8a3f9d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.280192] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-845264a3-a551-4979-baaf-19e5b180d3e3 {{(pid=68443) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.290520] env[68443]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-97492684-e678-4e1b-8ad5-b0559f94a1eb {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.292106] env[68443]: DEBUG oslo_vmware.api [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Task: {'id': task-3374054, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.07771} completed successfully. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1850.292338] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Deleted the datastore file {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1850.292512] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Deleted contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1850.292683] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1850.292855] env[68443]: INFO nova.compute.manager [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1850.294842] env[68443]: DEBUG nova.compute.claims [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Aborting claim: {{(pid=68443) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1850.295029] env[68443]: DEBUG oslo_concurrency.lockutils [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1850.295250] env[68443]: DEBUG oslo_concurrency.lockutils [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1850.319057] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1850.373655] env[68443]: DEBUG oslo_vmware.rw_handles [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8e51e797-4260-4923-819a-df15a21e0fef/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68443) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1850.432807] env[68443]: DEBUG oslo_vmware.rw_handles [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Completed reading data from the image iterator. {{(pid=68443) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1850.433012] env[68443]: DEBUG oslo_vmware.rw_handles [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8e51e797-4260-4923-819a-df15a21e0fef/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68443) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1850.568481] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15741f1a-ff6d-4e4e-af13-901196bbd3b3 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.576503] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aab0a4a-8c3e-4040-aee6-0d4714722950 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.606601] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa551db9-50de-4089-b932-da393f864258 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.613998] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49bbe02f-65a8-4547-80e6-6be6bd9a7a36 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.627242] env[68443]: DEBUG nova.compute.provider_tree [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1850.637764] env[68443]: DEBUG nova.scheduler.client.report [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1850.652727] env[68443]: DEBUG oslo_concurrency.lockutils [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.357s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1850.653305] env[68443]: ERROR nova.compute.manager [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1850.653305] env[68443]: Faults: ['InvalidArgument'] [ 1850.653305] env[68443]: ERROR nova.compute.manager [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Traceback (most recent call last): [ 1850.653305] env[68443]: ERROR nova.compute.manager [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1850.653305] env[68443]: ERROR 
nova.compute.manager [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] self.driver.spawn(context, instance, image_meta, [ 1850.653305] env[68443]: ERROR nova.compute.manager [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1850.653305] env[68443]: ERROR nova.compute.manager [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1850.653305] env[68443]: ERROR nova.compute.manager [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1850.653305] env[68443]: ERROR nova.compute.manager [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] self._fetch_image_if_missing(context, vi) [ 1850.653305] env[68443]: ERROR nova.compute.manager [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1850.653305] env[68443]: ERROR nova.compute.manager [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] image_cache(vi, tmp_image_ds_loc) [ 1850.653305] env[68443]: ERROR nova.compute.manager [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1850.653727] env[68443]: ERROR nova.compute.manager [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] vm_util.copy_virtual_disk( [ 1850.653727] env[68443]: ERROR nova.compute.manager [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1850.653727] env[68443]: ERROR nova.compute.manager [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] session._wait_for_task(vmdk_copy_task) [ 1850.653727] env[68443]: ERROR nova.compute.manager [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1850.653727] env[68443]: ERROR nova.compute.manager [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] return self.wait_for_task(task_ref) [ 1850.653727] env[68443]: ERROR nova.compute.manager [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1850.653727] env[68443]: ERROR nova.compute.manager [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] return evt.wait() [ 1850.653727] env[68443]: ERROR nova.compute.manager [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1850.653727] env[68443]: ERROR nova.compute.manager [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] result = hub.switch() [ 1850.653727] env[68443]: ERROR nova.compute.manager [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1850.653727] env[68443]: ERROR nova.compute.manager [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] return self.greenlet.switch() [ 1850.653727] env[68443]: ERROR nova.compute.manager [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1850.653727] env[68443]: ERROR nova.compute.manager [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] self.f(*self.args, **self.kw) [ 1850.654368] env[68443]: ERROR nova.compute.manager [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1850.654368] env[68443]: ERROR nova.compute.manager [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] raise exceptions.translate_fault(task_info.error) [ 1850.654368] env[68443]: ERROR nova.compute.manager [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1850.654368] env[68443]: ERROR nova.compute.manager [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Faults: ['InvalidArgument'] [ 1850.654368] env[68443]: ERROR nova.compute.manager [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] [ 1850.654368] env[68443]: DEBUG nova.compute.utils [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] VimFaultException {{(pid=68443) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1850.655757] env[68443]: DEBUG nova.compute.manager [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Build of instance 18bae6a1-3bd0-4749-8795-5b8ccd18193f was re-scheduled: A specified parameter was not correct: fileType [ 1850.655757] env[68443]: Faults: ['InvalidArgument'] {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1850.656133] env[68443]: DEBUG nova.compute.manager [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Unplugging VIFs for instance {{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1850.656369] env[68443]: DEBUG nova.compute.manager [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1850.656593] env[68443]: DEBUG nova.compute.manager [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1850.656768] env[68443]: DEBUG nova.network.neutron [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1850.962434] env[68443]: DEBUG nova.network.neutron [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1850.974346] env[68443]: INFO nova.compute.manager [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Took 0.32 seconds to deallocate network for instance. [ 1851.069140] env[68443]: INFO nova.scheduler.client.report [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Deleted allocations for instance 18bae6a1-3bd0-4749-8795-5b8ccd18193f [ 1851.092831] env[68443]: DEBUG oslo_concurrency.lockutils [None req-784d6d29-1459-47f8-a0ae-42b87d1f1d84 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Lock "18bae6a1-3bd0-4749-8795-5b8ccd18193f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 544.333s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1851.094119] env[68443]: DEBUG oslo_concurrency.lockutils [None req-0f375b30-f150-48f5-a8b0-ec3143751558 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Lock "18bae6a1-3bd0-4749-8795-5b8ccd18193f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 348.661s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1851.094311] env[68443]: DEBUG oslo_concurrency.lockutils [None req-0f375b30-f150-48f5-a8b0-ec3143751558 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Acquiring lock "18bae6a1-3bd0-4749-8795-5b8ccd18193f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1851.094515] env[68443]: DEBUG oslo_concurrency.lockutils [None req-0f375b30-f150-48f5-a8b0-ec3143751558 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Lock "18bae6a1-3bd0-4749-8795-5b8ccd18193f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68443) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1851.094679] env[68443]: DEBUG oslo_concurrency.lockutils [None req-0f375b30-f150-48f5-a8b0-ec3143751558 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Lock "18bae6a1-3bd0-4749-8795-5b8ccd18193f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1851.097623] env[68443]: INFO nova.compute.manager [None req-0f375b30-f150-48f5-a8b0-ec3143751558 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Terminating instance [ 1851.099391] env[68443]: DEBUG nova.compute.manager [None req-0f375b30-f150-48f5-a8b0-ec3143751558 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1851.099615] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-0f375b30-f150-48f5-a8b0-ec3143751558 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1851.100094] env[68443]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1184443d-ae6b-46d5-92ec-7a60404aeb38 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.109667] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5da7f4d1-c54c-4524-8936-41fc080cf412 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.120420] env[68443]: DEBUG nova.compute.manager [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1851.141189] env[68443]: WARNING nova.virt.vmwareapi.vmops [None req-0f375b30-f150-48f5-a8b0-ec3143751558 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 18bae6a1-3bd0-4749-8795-5b8ccd18193f could not be found. [ 1851.141308] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-0f375b30-f150-48f5-a8b0-ec3143751558 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1851.141498] env[68443]: INFO nova.compute.manager [None req-0f375b30-f150-48f5-a8b0-ec3143751558 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 1851.141745] env[68443]: DEBUG oslo.service.loopingcall [None req-0f375b30-f150-48f5-a8b0-ec3143751558 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1851.141973] env[68443]: DEBUG nova.compute.manager [-] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1851.142083] env[68443]: DEBUG nova.network.neutron [-] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1851.169142] env[68443]: DEBUG nova.network.neutron [-] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1851.176184] env[68443]: INFO nova.compute.manager [-] [instance: 18bae6a1-3bd0-4749-8795-5b8ccd18193f] Took 0.03 seconds to deallocate network for instance. [ 1851.182055] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1851.182055] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1851.183136] env[68443]: INFO nova.compute.claims [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1851.260171] env[68443]: DEBUG oslo_concurrency.lockutils [None req-0f375b30-f150-48f5-a8b0-ec3143751558 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Lock "18bae6a1-3bd0-4749-8795-5b8ccd18193f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.166s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1851.385737] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbcdb7d4-03de-4974-a2f6-774a0177a41f {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.393029] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de9204a5-686c-40ae-961f-4e05c75689d7 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.422831] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-49bb5389-b0db-4926-a27a-d952c5d66c91 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.429482] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8728f0d1-4826-4db1-9949-a5bf53eb1de2 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.441977] env[68443]: DEBUG nova.compute.provider_tree [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1851.451346] env[68443]: DEBUG nova.scheduler.client.report [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1851.464261] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.282s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1851.464737] env[68443]: DEBUG nova.compute.manager [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Start building networks asynchronously for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1851.497270] env[68443]: DEBUG nova.compute.utils [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Using /dev/sd instead of None {{(pid=68443) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1851.498608] env[68443]: DEBUG nova.compute.manager [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Allocating IP information in the background. 
{{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1851.498780] env[68443]: DEBUG nova.network.neutron [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] [instance: 12b39079-051e-4997-9fa1-7e467af04306] allocate_for_instance() {{(pid=68443) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1851.506462] env[68443]: DEBUG nova.compute.manager [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Start building block device mappings for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1851.553321] env[68443]: DEBUG nova.policy [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2d075bc616994586be6080091a32f972', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c46000b75d954ac392483aa5f445d297', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68443) authorize /opt/stack/nova/nova/policy.py:203}} [ 1851.565573] env[68443]: DEBUG nova.compute.manager [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Start spawning the instance on the hypervisor. 
{{(pid=68443) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1851.590683] env[68443]: DEBUG nova.virt.hardware [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-08T18:48:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-08T18:48:12Z,direct_url=,disk_format='vmdk',id=a80a8b97-4d56-4702-9b02-9d115fcd6710,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f4f2a1e220914ec3b281775c224df247',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-08T18:48:13Z,virtual_size=,visibility=), allow threads: False {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1851.590928] env[68443]: DEBUG nova.virt.hardware [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Flavor limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1851.591100] env[68443]: DEBUG nova.virt.hardware [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Image limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1851.591295] env[68443]: DEBUG nova.virt.hardware [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Flavor pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1851.591441] env[68443]: DEBUG nova.virt.hardware [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Image pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1851.591578] env[68443]: DEBUG nova.virt.hardware [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1851.591778] env[68443]: DEBUG nova.virt.hardware [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1851.591934] env[68443]: DEBUG nova.virt.hardware [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1851.592116] env[68443]: DEBUG nova.virt.hardware [None 
req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Got 1 possible topologies {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1851.592282] env[68443]: DEBUG nova.virt.hardware [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1851.592455] env[68443]: DEBUG nova.virt.hardware [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1851.593359] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22ac34c9-60f1-4e80-986d-8cbf537ada1d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.601768] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78d71793-f624-4655-b819-1ff16978bb8f {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.056834] env[68443]: DEBUG nova.network.neutron [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Successfully created port: e3a523d1-27a8-415c-a286-b556630b2781 {{(pid=68443) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1852.570824] env[68443]: DEBUG oslo_concurrency.lockutils [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Acquiring lock "75ba0bb9-0498-4434-aed1-b03aa0bcaf03" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1852.571167] env[68443]: DEBUG oslo_concurrency.lockutils [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Lock "75ba0bb9-0498-4434-aed1-b03aa0bcaf03" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1852.653584] env[68443]: DEBUG nova.network.neutron [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Successfully updated port: e3a523d1-27a8-415c-a286-b556630b2781 {{(pid=68443) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1852.665749] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Acquiring lock "refresh_cache-12b39079-051e-4997-9fa1-7e467af04306" {{(pid=68443) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1852.665921] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Acquired lock "refresh_cache-12b39079-051e-4997-9fa1-7e467af04306" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1852.666102] env[68443]: DEBUG nova.network.neutron [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Building network info cache for instance {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1852.708938] env[68443]: DEBUG nova.network.neutron [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Instance cache missing network info. {{(pid=68443) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1852.881390] env[68443]: DEBUG nova.network.neutron [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Updating instance_info_cache with network_info: [{"id": "e3a523d1-27a8-415c-a286-b556630b2781", "address": "fa:16:3e:5f:8d:68", "network": {"id": "54a83cdc-e7ea-4604-a165-6901bb2f6ccd", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-636635065-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c46000b75d954ac392483aa5f445d297", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9630cae2-7dd9-42b7-8b53-91ab254af243", "external-id": "nsx-vlan-transportzone-900", "segmentation_id": 900, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape3a523d1-27", "ovs_interfaceid": "e3a523d1-27a8-415c-a286-b556630b2781", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1852.894113] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Releasing lock "refresh_cache-12b39079-051e-4997-9fa1-7e467af04306" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1852.894416] env[68443]: DEBUG nova.compute.manager [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Instance network_info: |[{"id": "e3a523d1-27a8-415c-a286-b556630b2781", "address": "fa:16:3e:5f:8d:68", "network": {"id": 
"54a83cdc-e7ea-4604-a165-6901bb2f6ccd", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-636635065-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c46000b75d954ac392483aa5f445d297", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9630cae2-7dd9-42b7-8b53-91ab254af243", "external-id": "nsx-vlan-transportzone-900", "segmentation_id": 900, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape3a523d1-27", "ovs_interfaceid": "e3a523d1-27a8-415c-a286-b556630b2781", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1852.894845] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5f:8d:68', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9630cae2-7dd9-42b7-8b53-91ab254af243', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e3a523d1-27a8-415c-a286-b556630b2781', 'vif_model': 'vmxnet3'}] {{(pid=68443) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1852.902335] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Creating folder: Project (c46000b75d954ac392483aa5f445d297). Parent ref: group-v673136. {{(pid=68443) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1852.902850] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-81897ad1-e4b7-46c9-9724-85b513e94beb {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.914075] env[68443]: INFO nova.virt.vmwareapi.vm_util [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Created folder: Project (c46000b75d954ac392483aa5f445d297) in parent group-v673136. [ 1852.914261] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Creating folder: Instances. Parent ref: group-v673231. 
{{(pid=68443) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1852.914482] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1349584e-2126-49fa-a2dd-fa28e418568f {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.922364] env[68443]: INFO nova.virt.vmwareapi.vm_util [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Created folder: Instances in parent group-v673231. [ 1852.922616] env[68443]: DEBUG oslo.service.loopingcall [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1852.922797] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Creating VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1852.922986] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-da253df8-07e7-46d9-be14-fbee431c2536 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.941480] env[68443]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1852.941480] env[68443]: value = "task-3374057" [ 1852.941480] env[68443]: _type = "Task" [ 1852.941480] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1852.948864] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3374057, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1853.055661] env[68443]: DEBUG nova.compute.manager [req-c075a675-f07a-4945-9df1-f0df2bc760ca req-1d74e51e-1fb8-4077-a85c-20118d3fec92 service nova] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Received event network-vif-plugged-e3a523d1-27a8-415c-a286-b556630b2781 {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1853.055661] env[68443]: DEBUG oslo_concurrency.lockutils [req-c075a675-f07a-4945-9df1-f0df2bc760ca req-1d74e51e-1fb8-4077-a85c-20118d3fec92 service nova] Acquiring lock "12b39079-051e-4997-9fa1-7e467af04306-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1853.055661] env[68443]: DEBUG oslo_concurrency.lockutils [req-c075a675-f07a-4945-9df1-f0df2bc760ca req-1d74e51e-1fb8-4077-a85c-20118d3fec92 service nova] Lock "12b39079-051e-4997-9fa1-7e467af04306-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1853.055661] env[68443]: DEBUG oslo_concurrency.lockutils [req-c075a675-f07a-4945-9df1-f0df2bc760ca req-1d74e51e-1fb8-4077-a85c-20118d3fec92 service nova] Lock "12b39079-051e-4997-9fa1-7e467af04306-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1853.056133] env[68443]: DEBUG nova.compute.manager [req-c075a675-f07a-4945-9df1-f0df2bc760ca req-1d74e51e-1fb8-4077-a85c-20118d3fec92 service nova] [instance: 12b39079-051e-4997-9fa1-7e467af04306] No waiting events found dispatching network-vif-plugged-e3a523d1-27a8-415c-a286-b556630b2781 {{(pid=68443) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1853.056133] env[68443]: WARNING nova.compute.manager [req-c075a675-f07a-4945-9df1-f0df2bc760ca req-1d74e51e-1fb8-4077-a85c-20118d3fec92 service nova] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Received unexpected event network-vif-plugged-e3a523d1-27a8-415c-a286-b556630b2781 for instance with vm_state building and task_state spawning. [ 1853.056133] env[68443]: DEBUG nova.compute.manager [req-c075a675-f07a-4945-9df1-f0df2bc760ca req-1d74e51e-1fb8-4077-a85c-20118d3fec92 service nova] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Received event network-changed-e3a523d1-27a8-415c-a286-b556630b2781 {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1853.056255] env[68443]: DEBUG nova.compute.manager [req-c075a675-f07a-4945-9df1-f0df2bc760ca req-1d74e51e-1fb8-4077-a85c-20118d3fec92 service nova] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Refreshing instance network info cache due to event network-changed-e3a523d1-27a8-415c-a286-b556630b2781. 
{{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1853.056388] env[68443]: DEBUG oslo_concurrency.lockutils [req-c075a675-f07a-4945-9df1-f0df2bc760ca req-1d74e51e-1fb8-4077-a85c-20118d3fec92 service nova] Acquiring lock "refresh_cache-12b39079-051e-4997-9fa1-7e467af04306" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1853.056561] env[68443]: DEBUG oslo_concurrency.lockutils [req-c075a675-f07a-4945-9df1-f0df2bc760ca req-1d74e51e-1fb8-4077-a85c-20118d3fec92 service nova] Acquired lock "refresh_cache-12b39079-051e-4997-9fa1-7e467af04306" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1853.056769] env[68443]: DEBUG nova.network.neutron [req-c075a675-f07a-4945-9df1-f0df2bc760ca req-1d74e51e-1fb8-4077-a85c-20118d3fec92 service nova] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Refreshing network info cache for port e3a523d1-27a8-415c-a286-b556630b2781 {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1853.308857] env[68443]: DEBUG nova.network.neutron [req-c075a675-f07a-4945-9df1-f0df2bc760ca req-1d74e51e-1fb8-4077-a85c-20118d3fec92 service nova] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Updated VIF entry in instance network info cache for port e3a523d1-27a8-415c-a286-b556630b2781. {{(pid=68443) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1853.309245] env[68443]: DEBUG nova.network.neutron [req-c075a675-f07a-4945-9df1-f0df2bc760ca req-1d74e51e-1fb8-4077-a85c-20118d3fec92 service nova] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Updating instance_info_cache with network_info: [{"id": "e3a523d1-27a8-415c-a286-b556630b2781", "address": "fa:16:3e:5f:8d:68", "network": {"id": "54a83cdc-e7ea-4604-a165-6901bb2f6ccd", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-636635065-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c46000b75d954ac392483aa5f445d297", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9630cae2-7dd9-42b7-8b53-91ab254af243", "external-id": "nsx-vlan-transportzone-900", "segmentation_id": 900, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape3a523d1-27", "ovs_interfaceid": "e3a523d1-27a8-415c-a286-b556630b2781", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1853.319470] env[68443]: DEBUG oslo_concurrency.lockutils [req-c075a675-f07a-4945-9df1-f0df2bc760ca req-1d74e51e-1fb8-4077-a85c-20118d3fec92 service nova] Releasing lock "refresh_cache-12b39079-051e-4997-9fa1-7e467af04306" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1853.452058] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3374057, 'name': CreateVM_Task, 'duration_secs': 0.305784} completed successfully. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1853.452228] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Created VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1853.452883] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1853.453133] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1853.453466] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1853.453753] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c2d688a4-1381-4c9f-b3f1-024bd97d00d5 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.457869] env[68443]: DEBUG oslo_vmware.api [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Waiting for the task: (returnval){ [ 1853.457869] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]529f0838-e6b7-a6f2-f975-27dbe8bd2da5" [ 1853.457869] env[68443]: _type = "Task" [ 1853.457869] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1853.464971] env[68443]: DEBUG oslo_vmware.api [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]529f0838-e6b7-a6f2-f975-27dbe8bd2da5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1853.968920] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1853.969284] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Processing image a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1853.969401] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1856.640991] env[68443]: DEBUG oslo_concurrency.lockutils [None req-70cd0dd9-2c4b-4ae6-9d99-98b7b5fa4c07 tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Acquiring lock "12b39079-051e-4997-9fa1-7e467af04306" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1871.825726] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1871.825726] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1874.826050] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1874.837139] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1874.837354] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1874.837525] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1874.837688] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68443) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1874.838807] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-493153b2-9e49-4286-a8e5-278fdbfbf18f {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.847899] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8382f211-c1ce-4ec3-8dc3-5174e3464088 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.861755] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ede90bd-2468-451a-80e0-55d442acc02a {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.867874] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4679e4e1-91c5-4a92-9c33-05b5de11a576 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.895918] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180946MB free_disk=105GB free_vcpus=48 pci_devices=None {{(pid=68443) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1874.896081] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1874.896270] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1875.043755] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 99b16cd5-beb0-4f71-8011-411b84ddf497 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1875.043904] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1875.044061] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 7366efe5-c640-4689-97a1-fba0ac431b12 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1875.044199] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 04fc458a-a928-43ef-8fd0-bfc49989d2b1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1875.044325] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 2985403d-348f-473d-ad1f-75fb67d3be12 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1875.044450] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 0a9bb99d-8f94-4f26-990e-a57aac09c328 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1875.044566] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance b8c2916e-3b70-42c9-9f85-ee8582c636b8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1875.044684] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 062710e8-2ccb-4926-97ce-bf6a9fa4d10c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1875.044800] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 963d7f65-a761-4ce1-b6d1-fc987c3111c0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1875.044915] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 12b39079-051e-4997-9fa1-7e467af04306 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1875.056382] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance cf4f7ce7-55d5-4f5a-b19d-7674ee1969b5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1875.066881] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 6333b256-471f-485d-b099-21fa82349319 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1875.078861] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance a81ad5e2-d4bb-4ef0-a268-c7012538821d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1875.088365] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance a077b8cb-2ae3-4a4b-b309-ca8dbca0e412 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1875.098053] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 75ba0bb9-0498-4434-aed1-b03aa0bcaf03 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1875.098053] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1875.098053] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1875.114591] env[68443]: DEBUG nova.scheduler.client.report [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Refreshing inventories for resource provider feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1875.129401] env[68443]: DEBUG nova.scheduler.client.report [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Updating ProviderTree inventory for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1875.129589] env[68443]: DEBUG nova.compute.provider_tree [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Updating inventory in ProviderTree for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1875.140504] env[68443]: DEBUG nova.scheduler.client.report [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Refreshing aggregate associations for resource provider feda0f0b-e324-4b78-af74-5e6cfd355a37, aggregates: None {{(pid=68443) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1875.157867] env[68443]: DEBUG nova.scheduler.client.report [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Refreshing trait associations for resource provider feda0f0b-e324-4b78-af74-5e6cfd355a37, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=68443) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1875.315022] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20a2d62b-f214-43c0-9260-998574e2f571 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.322299] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-88d71797-9719-427f-97c1-d6cdad6b710b {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.351078] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8685b70d-7999-4e1d-8de7-af74488097f5 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.358195] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47bbc85a-1897-4798-a1a2-df4b091fab16 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.371961] env[68443]: DEBUG nova.compute.provider_tree [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1875.380226] env[68443]: DEBUG nova.scheduler.client.report [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1875.394010] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68443) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1875.394203] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.498s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1875.825474] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1875.825652] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Starting heal instance info cache {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1875.825799] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Rebuilding the list of instances to heal {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1875.848023] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Skipping network cache update for instance because it is Building. 
{{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1875.848370] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1875.848370] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1875.848444] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1875.848552] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1875.848672] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1875.848811] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1875.848949] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1875.849083] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1875.849206] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1875.849327] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Didn't find any instances for network info cache update. 
{{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1875.849888] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1875.850043] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Cleaning up deleted instances {{(pid=68443) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11219}} [ 1875.859057] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] There are 0 instances to clean {{(pid=68443) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 1875.859057] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1876.839441] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1876.839597] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1876.839664] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68443) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1877.826057] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1877.826057] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1877.826057] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Cleaning up deleted instances with incomplete migration {{(pid=68443) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11257}} [ 1879.830883] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1883.826810] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1884.821024] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1894.040053] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._sync_power_states {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1894.059886] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Getting list of instances from cluster (obj){ [ 1894.059886] env[68443]: value = "domain-c8" [ 1894.059886] env[68443]: _type = "ClusterComputeResource" [ 1894.059886] env[68443]: } {{(pid=68443) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1894.061186] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42a721e4-eb9d-4b59-9d4a-7a737d5bfd92 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.078788] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Got total of 10 instances {{(pid=68443) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1894.078959] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Triggering sync for uuid 99b16cd5-beb0-4f71-8011-411b84ddf497 {{(pid=68443) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1894.079178] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Triggering sync for uuid 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba {{(pid=68443) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1894.079345] 
env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Triggering sync for uuid 7366efe5-c640-4689-97a1-fba0ac431b12 {{(pid=68443) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1894.079504] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Triggering sync for uuid 04fc458a-a928-43ef-8fd0-bfc49989d2b1 {{(pid=68443) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1894.079659] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Triggering sync for uuid 2985403d-348f-473d-ad1f-75fb67d3be12 {{(pid=68443) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1894.079814] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Triggering sync for uuid 0a9bb99d-8f94-4f26-990e-a57aac09c328 {{(pid=68443) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1894.079969] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Triggering sync for uuid b8c2916e-3b70-42c9-9f85-ee8582c636b8 {{(pid=68443) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1894.080131] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Triggering sync for uuid 062710e8-2ccb-4926-97ce-bf6a9fa4d10c {{(pid=68443) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1894.080282] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Triggering sync for uuid 963d7f65-a761-4ce1-b6d1-fc987c3111c0 {{(pid=68443) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1894.080430] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Triggering sync for uuid 12b39079-051e-4997-9fa1-7e467af04306 {{(pid=68443) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1894.080755] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "99b16cd5-beb0-4f71-8011-411b84ddf497" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1894.080995] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1894.081219] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "7366efe5-c640-4689-97a1-fba0ac431b12" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1894.081416] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "04fc458a-a928-43ef-8fd0-bfc49989d2b1" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1894.081611] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "2985403d-348f-473d-ad1f-75fb67d3be12" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1894.081801] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "0a9bb99d-8f94-4f26-990e-a57aac09c328" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1894.081992] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "b8c2916e-3b70-42c9-9f85-ee8582c636b8" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1894.082213] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "062710e8-2ccb-4926-97ce-bf6a9fa4d10c" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1894.082403] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "963d7f65-a761-4ce1-b6d1-fc987c3111c0" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1894.082610] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "12b39079-051e-4997-9fa1-7e467af04306" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1897.389378] env[68443]: DEBUG oslo_concurrency.lockutils [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Acquiring lock "da00322f-5482-4511-94a4-2e2f3705fb99" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1897.389692] env[68443]: DEBUG oslo_concurrency.lockutils [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Lock "da00322f-5482-4511-94a4-2e2f3705fb99" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1897.540973] env[68443]: WARNING oslo_vmware.rw_handles [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 
1897.540973] env[68443]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1897.540973] env[68443]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1897.540973] env[68443]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1897.540973] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1897.540973] env[68443]: ERROR oslo_vmware.rw_handles response.begin() [ 1897.540973] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1897.540973] env[68443]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1897.540973] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1897.540973] env[68443]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1897.540973] env[68443]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1897.540973] env[68443]: ERROR oslo_vmware.rw_handles [ 1897.541538] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Downloaded image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to vmware_temp/8e51e797-4260-4923-819a-df15a21e0fef/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1897.543717] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Caching image {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1897.543970] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Copying Virtual Disk [datastore1] vmware_temp/8e51e797-4260-4923-819a-df15a21e0fef/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk to [datastore1] vmware_temp/8e51e797-4260-4923-819a-df15a21e0fef/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk {{(pid=68443) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1897.544261] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7e739271-6a89-47e1-a85e-63c340094072 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.551817] env[68443]: DEBUG oslo_vmware.api [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Waiting for the task: (returnval){ [ 1897.551817] env[68443]: value = "task-3374058" [ 1897.551817] env[68443]: _type = "Task" [ 1897.551817] env[68443]: } to complete. 
{{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1897.559488] env[68443]: DEBUG oslo_vmware.api [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Task: {'id': task-3374058, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1898.062039] env[68443]: DEBUG oslo_vmware.exceptions [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Fault InvalidArgument not matched. {{(pid=68443) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1898.062330] env[68443]: DEBUG oslo_concurrency.lockutils [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1898.062884] env[68443]: ERROR nova.compute.manager [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1898.062884] env[68443]: Faults: ['InvalidArgument'] [ 1898.062884] env[68443]: ERROR nova.compute.manager [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Traceback (most recent call last): [ 1898.062884] env[68443]: ERROR nova.compute.manager [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1898.062884] env[68443]: ERROR nova.compute.manager [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] yield resources [ 1898.062884] env[68443]: ERROR nova.compute.manager [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1898.062884] env[68443]: ERROR nova.compute.manager [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] self.driver.spawn(context, instance, image_meta, [ 1898.062884] env[68443]: ERROR nova.compute.manager [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1898.062884] env[68443]: ERROR nova.compute.manager [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1898.062884] env[68443]: ERROR nova.compute.manager [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1898.062884] env[68443]: ERROR nova.compute.manager [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] self._fetch_image_if_missing(context, vi) [ 1898.062884] env[68443]: ERROR nova.compute.manager [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1898.063303] env[68443]: ERROR nova.compute.manager [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] image_cache(vi, tmp_image_ds_loc) [ 1898.063303] env[68443]: ERROR nova.compute.manager [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] 
File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1898.063303] env[68443]: ERROR nova.compute.manager [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] vm_util.copy_virtual_disk( [ 1898.063303] env[68443]: ERROR nova.compute.manager [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1898.063303] env[68443]: ERROR nova.compute.manager [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] session._wait_for_task(vmdk_copy_task) [ 1898.063303] env[68443]: ERROR nova.compute.manager [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1898.063303] env[68443]: ERROR nova.compute.manager [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] return self.wait_for_task(task_ref) [ 1898.063303] env[68443]: ERROR nova.compute.manager [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1898.063303] env[68443]: ERROR nova.compute.manager [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] return evt.wait() [ 1898.063303] env[68443]: ERROR nova.compute.manager [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1898.063303] env[68443]: ERROR nova.compute.manager [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] result = hub.switch() [ 1898.063303] env[68443]: ERROR nova.compute.manager [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1898.063303] env[68443]: ERROR nova.compute.manager [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] return self.greenlet.switch() [ 1898.063726] env[68443]: ERROR nova.compute.manager [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1898.063726] env[68443]: ERROR nova.compute.manager [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] self.f(*self.args, **self.kw) [ 1898.063726] env[68443]: ERROR nova.compute.manager [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1898.063726] env[68443]: ERROR nova.compute.manager [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] raise exceptions.translate_fault(task_info.error) [ 1898.063726] env[68443]: ERROR nova.compute.manager [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1898.063726] env[68443]: ERROR nova.compute.manager [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Faults: ['InvalidArgument'] [ 1898.063726] env[68443]: ERROR nova.compute.manager [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] [ 1898.063726] env[68443]: INFO nova.compute.manager [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Terminating instance [ 1898.064814] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1898.065033] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1898.065656] env[68443]: DEBUG nova.compute.manager [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1898.065857] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1898.066088] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8b9d3555-bc2a-4d56-814f-7c3579a59b8c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.068277] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b016283a-71fc-46d0-a5bc-66fe69705875 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.074626] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Unregistering the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1898.074831] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0ab5c1fe-3896-46a3-94f8-42ee2be81b55 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.076862] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1898.077046] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68443) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1898.077970] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef335925-7113-40cd-a1b3-5dc9aed865f5 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.082271] env[68443]: DEBUG oslo_vmware.api [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Waiting for the task: (returnval){ [ 1898.082271] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]523517e0-38c3-09a9-6bdd-9d0785f24340" [ 1898.082271] env[68443]: _type = "Task" [ 1898.082271] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1898.089016] env[68443]: DEBUG oslo_vmware.api [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]523517e0-38c3-09a9-6bdd-9d0785f24340, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1898.137655] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Unregistered the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1898.137879] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Deleting contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1898.138078] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Deleting the datastore file [datastore1] 99b16cd5-beb0-4f71-8011-411b84ddf497 {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1898.138345] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0cce23ce-b83d-4363-9bc1-3801e18573e8 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.144874] env[68443]: DEBUG oslo_vmware.api [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Waiting for the task: (returnval){ [ 1898.144874] env[68443]: value = "task-3374060" [ 1898.144874] env[68443]: _type = "Task" [ 1898.144874] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1898.153391] env[68443]: DEBUG oslo_vmware.api [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Task: {'id': task-3374060, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1898.593470] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Preparing fetch location {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1898.593774] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Creating directory with path [datastore1] vmware_temp/f15c30cf-3773-4154-ac05-3d40258ebd32/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1898.594015] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f585aac5-cc9a-4387-b916-bb0b3e3d4d0d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.604912] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Created directory with path [datastore1] vmware_temp/f15c30cf-3773-4154-ac05-3d40258ebd32/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1898.605168] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Fetch image to [datastore1] vmware_temp/f15c30cf-3773-4154-ac05-3d40258ebd32/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1898.605287] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to [datastore1] vmware_temp/f15c30cf-3773-4154-ac05-3d40258ebd32/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1898.606039] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a836c7fb-d7e8-447f-84e4-2cf736b0f493 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.612565] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27f0f120-5aa0-4a63-a6cf-c233ded6e623 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.621152] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b20a06f7-5e83-4c5d-8283-7b6011b4147d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.654305] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-69ca0037-3c35-4d3f-a041-10a8ee73dd98 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.663603] env[68443]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-3c8b107e-4b00-4df1-ad4b-019cbd185c06 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.665237] env[68443]: DEBUG oslo_vmware.api [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Task: {'id': task-3374060, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.078923} completed successfully. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1898.665473] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Deleted the datastore file {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1898.665649] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Deleted contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1898.665816] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1898.665986] env[68443]: INFO nova.compute.manager [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1898.668116] env[68443]: DEBUG nova.compute.claims [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Aborting claim: {{(pid=68443) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1898.668289] env[68443]: DEBUG oslo_concurrency.lockutils [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1898.668519] env[68443]: DEBUG oslo_concurrency.lockutils [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1898.686708] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1898.738483] env[68443]: DEBUG oslo_vmware.rw_handles [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f15c30cf-3773-4154-ac05-3d40258ebd32/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68443) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1898.802220] env[68443]: DEBUG oslo_vmware.rw_handles [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Completed reading data from the image iterator. {{(pid=68443) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1898.802411] env[68443]: DEBUG oslo_vmware.rw_handles [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f15c30cf-3773-4154-ac05-3d40258ebd32/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68443) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1898.922261] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0583901c-10cf-4570-8a12-8b38a6a128fb {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.930017] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f892908d-b820-407f-9e04-ea545f8dc950 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.961462] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ebfdae5-79c8-44f6-85e4-e70f22fb815e {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.968479] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64109181-95f6-4e09-97b5-0f605edb8fcf {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.981703] env[68443]: DEBUG nova.compute.provider_tree [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1898.990248] env[68443]: DEBUG nova.scheduler.client.report [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1899.004943] env[68443]: DEBUG oslo_concurrency.lockutils [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.336s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1899.005489] env[68443]: ERROR nova.compute.manager [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1899.005489] env[68443]: Faults: ['InvalidArgument'] [ 1899.005489] env[68443]: ERROR nova.compute.manager [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Traceback (most recent call last): [ 1899.005489] env[68443]: ERROR nova.compute.manager [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1899.005489] env[68443]: ERROR 
nova.compute.manager [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] self.driver.spawn(context, instance, image_meta, [ 1899.005489] env[68443]: ERROR nova.compute.manager [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1899.005489] env[68443]: ERROR nova.compute.manager [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1899.005489] env[68443]: ERROR nova.compute.manager [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1899.005489] env[68443]: ERROR nova.compute.manager [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] self._fetch_image_if_missing(context, vi) [ 1899.005489] env[68443]: ERROR nova.compute.manager [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1899.005489] env[68443]: ERROR nova.compute.manager [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] image_cache(vi, tmp_image_ds_loc) [ 1899.005489] env[68443]: ERROR nova.compute.manager [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1899.005882] env[68443]: ERROR nova.compute.manager [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] vm_util.copy_virtual_disk( [ 1899.005882] env[68443]: ERROR nova.compute.manager [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1899.005882] env[68443]: ERROR nova.compute.manager [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] session._wait_for_task(vmdk_copy_task) [ 1899.005882] env[68443]: ERROR nova.compute.manager [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1899.005882] env[68443]: ERROR nova.compute.manager [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] return self.wait_for_task(task_ref) [ 1899.005882] env[68443]: ERROR nova.compute.manager [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1899.005882] env[68443]: ERROR nova.compute.manager [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] return evt.wait() [ 1899.005882] env[68443]: ERROR nova.compute.manager [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1899.005882] env[68443]: ERROR nova.compute.manager [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] result = hub.switch() [ 1899.005882] env[68443]: ERROR nova.compute.manager [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1899.005882] env[68443]: ERROR nova.compute.manager [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] return self.greenlet.switch() [ 1899.005882] env[68443]: ERROR nova.compute.manager [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1899.005882] env[68443]: ERROR nova.compute.manager [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] self.f(*self.args, **self.kw) [ 1899.006301] env[68443]: ERROR nova.compute.manager [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1899.006301] env[68443]: ERROR nova.compute.manager [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] raise exceptions.translate_fault(task_info.error) [ 1899.006301] env[68443]: ERROR nova.compute.manager [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1899.006301] env[68443]: ERROR nova.compute.manager [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Faults: ['InvalidArgument'] [ 1899.006301] env[68443]: ERROR nova.compute.manager [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] [ 1899.006301] env[68443]: DEBUG nova.compute.utils [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] VimFaultException {{(pid=68443) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1899.007705] env[68443]: DEBUG nova.compute.manager [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Build of instance 99b16cd5-beb0-4f71-8011-411b84ddf497 was re-scheduled: A specified parameter was not correct: fileType [ 1899.007705] env[68443]: Faults: ['InvalidArgument'] {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1899.008082] env[68443]: DEBUG nova.compute.manager [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Unplugging VIFs for instance {{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1899.008258] env[68443]: DEBUG nova.compute.manager [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1899.008475] env[68443]: DEBUG nova.compute.manager [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1899.008691] env[68443]: DEBUG nova.network.neutron [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1899.304431] env[68443]: DEBUG nova.network.neutron [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1899.316401] env[68443]: INFO nova.compute.manager [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Took 0.31 seconds to deallocate network for instance. [ 1899.404878] env[68443]: INFO nova.scheduler.client.report [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Deleted allocations for instance 99b16cd5-beb0-4f71-8011-411b84ddf497 [ 1899.427719] env[68443]: DEBUG oslo_concurrency.lockutils [None req-6a72c5c3-e59a-4443-8b43-85c8f3a16b08 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Lock "99b16cd5-beb0-4f71-8011-411b84ddf497" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 562.094s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1899.428783] env[68443]: DEBUG oslo_concurrency.lockutils [None req-9802ce55-da7d-4183-95c6-61f46bb80e98 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Lock "99b16cd5-beb0-4f71-8011-411b84ddf497" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 365.353s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1899.429027] env[68443]: DEBUG oslo_concurrency.lockutils [None req-9802ce55-da7d-4183-95c6-61f46bb80e98 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Acquiring lock "99b16cd5-beb0-4f71-8011-411b84ddf497-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1899.429239] env[68443]: DEBUG oslo_concurrency.lockutils [None req-9802ce55-da7d-4183-95c6-61f46bb80e98 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Lock "99b16cd5-beb0-4f71-8011-411b84ddf497-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68443) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1899.429410] env[68443]: DEBUG oslo_concurrency.lockutils [None req-9802ce55-da7d-4183-95c6-61f46bb80e98 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Lock "99b16cd5-beb0-4f71-8011-411b84ddf497-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1899.431421] env[68443]: INFO nova.compute.manager [None req-9802ce55-da7d-4183-95c6-61f46bb80e98 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Terminating instance [ 1899.433012] env[68443]: DEBUG nova.compute.manager [None req-9802ce55-da7d-4183-95c6-61f46bb80e98 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1899.433203] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-9802ce55-da7d-4183-95c6-61f46bb80e98 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1899.433712] env[68443]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-711fcfa6-69e5-4379-a608-f853cc20a71a {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.444732] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adbc452e-c262-46aa-95cc-cbc28d1245aa {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.455769] env[68443]: DEBUG nova.compute.manager [None req-b7fec8f2-b8fa-4881-9de9-51d40c3ac3ad tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: cf4f7ce7-55d5-4f5a-b19d-7674ee1969b5] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1899.477342] env[68443]: WARNING nova.virt.vmwareapi.vmops [None req-9802ce55-da7d-4183-95c6-61f46bb80e98 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 99b16cd5-beb0-4f71-8011-411b84ddf497 could not be found. [ 1899.477567] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-9802ce55-da7d-4183-95c6-61f46bb80e98 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1899.477758] env[68443]: INFO nova.compute.manager [None req-9802ce55-da7d-4183-95c6-61f46bb80e98 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 1899.478181] env[68443]: DEBUG oslo.service.loopingcall [None req-9802ce55-da7d-4183-95c6-61f46bb80e98 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1899.478276] env[68443]: DEBUG nova.compute.manager [-] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1899.478366] env[68443]: DEBUG nova.network.neutron [-] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1899.480755] env[68443]: DEBUG nova.compute.manager [None req-b7fec8f2-b8fa-4881-9de9-51d40c3ac3ad tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: cf4f7ce7-55d5-4f5a-b19d-7674ee1969b5] Instance disappeared before build. {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1899.501863] env[68443]: DEBUG oslo_concurrency.lockutils [None req-b7fec8f2-b8fa-4881-9de9-51d40c3ac3ad tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Lock "cf4f7ce7-55d5-4f5a-b19d-7674ee1969b5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 198.914s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1899.503888] env[68443]: DEBUG nova.network.neutron [-] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1899.511476] env[68443]: INFO nova.compute.manager [-] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] Took 0.03 seconds to deallocate network for instance. [ 1899.511791] env[68443]: DEBUG nova.compute.manager [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6333b256-471f-485d-b099-21fa82349319] Starting instance... 
{{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1899.558835] env[68443]: DEBUG oslo_concurrency.lockutils [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1899.559085] env[68443]: DEBUG oslo_concurrency.lockutils [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1899.560463] env[68443]: INFO nova.compute.claims [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6333b256-471f-485d-b099-21fa82349319] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1899.597750] env[68443]: DEBUG oslo_concurrency.lockutils [None req-9802ce55-da7d-4183-95c6-61f46bb80e98 tempest-ServerAddressesTestJSON-3972615 tempest-ServerAddressesTestJSON-3972615-project-member] Lock "99b16cd5-beb0-4f71-8011-411b84ddf497" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.169s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1899.598820] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "99b16cd5-beb0-4f71-8011-411b84ddf497" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 5.518s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1899.598963] env[68443]: INFO nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 99b16cd5-beb0-4f71-8011-411b84ddf497] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1899.599087] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "99b16cd5-beb0-4f71-8011-411b84ddf497" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1899.770163] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f50ef833-e98e-4b89-86ed-c8b0f13d0086 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.777066] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfe24553-c059-463e-b075-26804f073a3d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.805941] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63632584-670a-47d5-a0e4-9f915f765c93 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.812801] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6f1c5b4-18ba-43c8-a7e1-87a5f048571a {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.826504] env[68443]: DEBUG nova.compute.provider_tree [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1899.834989] env[68443]: DEBUG nova.scheduler.client.report [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1899.854604] env[68443]: DEBUG oslo_concurrency.lockutils [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.295s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1899.855126] env[68443]: DEBUG nova.compute.manager [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6333b256-471f-485d-b099-21fa82349319] Start building networks asynchronously for instance. 
{{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1899.887329] env[68443]: DEBUG nova.compute.utils [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Using /dev/sd instead of None {{(pid=68443) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1899.888942] env[68443]: DEBUG nova.compute.manager [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6333b256-471f-485d-b099-21fa82349319] Allocating IP information in the background. {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1899.888942] env[68443]: DEBUG nova.network.neutron [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6333b256-471f-485d-b099-21fa82349319] allocate_for_instance() {{(pid=68443) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1899.896469] env[68443]: DEBUG nova.compute.manager [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6333b256-471f-485d-b099-21fa82349319] Start building block device mappings for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1899.949839] env[68443]: DEBUG nova.policy [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3c9056fd69304807abfeb2fedc4ae20f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5d0592ea4b3c49698b73391ae2be0ad8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68443) authorize /opt/stack/nova/nova/policy.py:203}} [ 1899.956698] env[68443]: DEBUG nova.compute.manager [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6333b256-471f-485d-b099-21fa82349319] Start spawning the instance on the hypervisor. 
{{(pid=68443) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1899.981647] env[68443]: DEBUG nova.virt.hardware [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-08T18:48:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-08T18:48:12Z,direct_url=,disk_format='vmdk',id=a80a8b97-4d56-4702-9b02-9d115fcd6710,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f4f2a1e220914ec3b281775c224df247',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-08T18:48:13Z,virtual_size=,visibility=), allow threads: False {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1899.982745] env[68443]: DEBUG nova.virt.hardware [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Flavor limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1899.982745] env[68443]: DEBUG nova.virt.hardware [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Image limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1899.982745] env[68443]: DEBUG nova.virt.hardware [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Flavor pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1899.982745] env[68443]: DEBUG nova.virt.hardware [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Image pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1899.982745] env[68443]: DEBUG nova.virt.hardware [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1899.983022] env[68443]: DEBUG nova.virt.hardware [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1899.983022] env[68443]: DEBUG nova.virt.hardware [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1899.983100] 
env[68443]: DEBUG nova.virt.hardware [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Got 1 possible topologies {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1899.983204] env[68443]: DEBUG nova.virt.hardware [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1899.983375] env[68443]: DEBUG nova.virt.hardware [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1899.984287] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f557c57-8e06-49b0-86db-70aeb8ab67c3 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.992891] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03777b6b-bc41-47c6-9b7e-291d5f1a7e99 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.252763] env[68443]: DEBUG nova.network.neutron [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6333b256-471f-485d-b099-21fa82349319] Successfully created port: 68118aaa-0f46-4631-82c7-a437dc7deaeb {{(pid=68443) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1900.839837] env[68443]: DEBUG nova.network.neutron [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6333b256-471f-485d-b099-21fa82349319] Successfully updated port: 68118aaa-0f46-4631-82c7-a437dc7deaeb {{(pid=68443) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1900.854078] env[68443]: DEBUG oslo_concurrency.lockutils [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Acquiring lock "refresh_cache-6333b256-471f-485d-b099-21fa82349319" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1900.854235] env[68443]: DEBUG oslo_concurrency.lockutils [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Acquired lock "refresh_cache-6333b256-471f-485d-b099-21fa82349319" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1900.854386] env[68443]: DEBUG nova.network.neutron [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6333b256-471f-485d-b099-21fa82349319] Building network info cache for instance {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1900.895634] env[68443]: DEBUG 
nova.network.neutron [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6333b256-471f-485d-b099-21fa82349319] Instance cache missing network info. {{(pid=68443) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1901.064756] env[68443]: DEBUG nova.network.neutron [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6333b256-471f-485d-b099-21fa82349319] Updating instance_info_cache with network_info: [{"id": "68118aaa-0f46-4631-82c7-a437dc7deaeb", "address": "fa:16:3e:ba:db:6c", "network": {"id": "ef236b73-c299-4b78-ba68-0be63802a04b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1138155627-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d0592ea4b3c49698b73391ae2be0ad8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afae6acd-1873-4228-9d5a-1cd5d4efe3e4", "external-id": "nsx-vlan-transportzone-183", "segmentation_id": 183, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap68118aaa-0f", "ovs_interfaceid": "68118aaa-0f46-4631-82c7-a437dc7deaeb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1901.075522] env[68443]: DEBUG oslo_concurrency.lockutils [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Releasing lock "refresh_cache-6333b256-471f-485d-b099-21fa82349319" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1901.075790] env[68443]: DEBUG nova.compute.manager [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6333b256-471f-485d-b099-21fa82349319] Instance network_info: |[{"id": "68118aaa-0f46-4631-82c7-a437dc7deaeb", "address": "fa:16:3e:ba:db:6c", "network": {"id": "ef236b73-c299-4b78-ba68-0be63802a04b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1138155627-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d0592ea4b3c49698b73391ae2be0ad8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afae6acd-1873-4228-9d5a-1cd5d4efe3e4", "external-id": "nsx-vlan-transportzone-183", "segmentation_id": 183, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap68118aaa-0f", 
"ovs_interfaceid": "68118aaa-0f46-4631-82c7-a437dc7deaeb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1901.076217] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6333b256-471f-485d-b099-21fa82349319] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ba:db:6c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'afae6acd-1873-4228-9d5a-1cd5d4efe3e4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '68118aaa-0f46-4631-82c7-a437dc7deaeb', 'vif_model': 'vmxnet3'}] {{(pid=68443) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1901.084190] env[68443]: DEBUG oslo.service.loopingcall [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1901.084674] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6333b256-471f-485d-b099-21fa82349319] Creating VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1901.084905] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9246c361-7e81-4ff1-9610-20a07bf09df6 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.105054] env[68443]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1901.105054] env[68443]: value = "task-3374061" [ 1901.105054] env[68443]: _type = "Task" [ 1901.105054] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1901.112766] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3374061, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1901.393669] env[68443]: DEBUG nova.compute.manager [req-13d09249-4392-46fb-a521-b8d5d88f9702 req-14cf2d6e-0ee7-4a62-a120-356bcfb9fcdb service nova] [instance: 6333b256-471f-485d-b099-21fa82349319] Received event network-vif-plugged-68118aaa-0f46-4631-82c7-a437dc7deaeb {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1901.393917] env[68443]: DEBUG oslo_concurrency.lockutils [req-13d09249-4392-46fb-a521-b8d5d88f9702 req-14cf2d6e-0ee7-4a62-a120-356bcfb9fcdb service nova] Acquiring lock "6333b256-471f-485d-b099-21fa82349319-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1901.394139] env[68443]: DEBUG oslo_concurrency.lockutils [req-13d09249-4392-46fb-a521-b8d5d88f9702 req-14cf2d6e-0ee7-4a62-a120-356bcfb9fcdb service nova] Lock "6333b256-471f-485d-b099-21fa82349319-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1901.394311] env[68443]: DEBUG oslo_concurrency.lockutils [req-13d09249-4392-46fb-a521-b8d5d88f9702 req-14cf2d6e-0ee7-4a62-a120-356bcfb9fcdb service nova] Lock "6333b256-471f-485d-b099-21fa82349319-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1901.394478] env[68443]: DEBUG nova.compute.manager [req-13d09249-4392-46fb-a521-b8d5d88f9702 req-14cf2d6e-0ee7-4a62-a120-356bcfb9fcdb service nova] [instance: 6333b256-471f-485d-b099-21fa82349319] No waiting events found dispatching network-vif-plugged-68118aaa-0f46-4631-82c7-a437dc7deaeb {{(pid=68443) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1901.394646] env[68443]: WARNING nova.compute.manager [req-13d09249-4392-46fb-a521-b8d5d88f9702 req-14cf2d6e-0ee7-4a62-a120-356bcfb9fcdb service nova] [instance: 6333b256-471f-485d-b099-21fa82349319] Received unexpected event network-vif-plugged-68118aaa-0f46-4631-82c7-a437dc7deaeb for instance with vm_state building and task_state spawning. [ 1901.394807] env[68443]: DEBUG nova.compute.manager [req-13d09249-4392-46fb-a521-b8d5d88f9702 req-14cf2d6e-0ee7-4a62-a120-356bcfb9fcdb service nova] [instance: 6333b256-471f-485d-b099-21fa82349319] Received event network-changed-68118aaa-0f46-4631-82c7-a437dc7deaeb {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1901.394959] env[68443]: DEBUG nova.compute.manager [req-13d09249-4392-46fb-a521-b8d5d88f9702 req-14cf2d6e-0ee7-4a62-a120-356bcfb9fcdb service nova] [instance: 6333b256-471f-485d-b099-21fa82349319] Refreshing instance network info cache due to event network-changed-68118aaa-0f46-4631-82c7-a437dc7deaeb. 
{{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1901.395355] env[68443]: DEBUG oslo_concurrency.lockutils [req-13d09249-4392-46fb-a521-b8d5d88f9702 req-14cf2d6e-0ee7-4a62-a120-356bcfb9fcdb service nova] Acquiring lock "refresh_cache-6333b256-471f-485d-b099-21fa82349319" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1901.395508] env[68443]: DEBUG oslo_concurrency.lockutils [req-13d09249-4392-46fb-a521-b8d5d88f9702 req-14cf2d6e-0ee7-4a62-a120-356bcfb9fcdb service nova] Acquired lock "refresh_cache-6333b256-471f-485d-b099-21fa82349319" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1901.395703] env[68443]: DEBUG nova.network.neutron [req-13d09249-4392-46fb-a521-b8d5d88f9702 req-14cf2d6e-0ee7-4a62-a120-356bcfb9fcdb service nova] [instance: 6333b256-471f-485d-b099-21fa82349319] Refreshing network info cache for port 68118aaa-0f46-4631-82c7-a437dc7deaeb {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1901.614378] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3374061, 'name': CreateVM_Task, 'duration_secs': 0.285608} completed successfully. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1901.614553] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6333b256-471f-485d-b099-21fa82349319] Created VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1901.619289] env[68443]: DEBUG oslo_concurrency.lockutils [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1901.619481] env[68443]: DEBUG oslo_concurrency.lockutils [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1901.619817] env[68443]: DEBUG oslo_concurrency.lockutils [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1901.620093] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1046cf11-5d1e-4523-87b8-e427be14b439 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.624648] env[68443]: DEBUG oslo_vmware.api [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Waiting for the task: (returnval){ [ 1901.624648] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]52fb6483-90d7-3c93-91e5-cd6fc12987f7" [ 1901.624648] env[68443]: _type = "Task" [ 1901.624648] env[68443]: } to complete. 
{{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1901.633404] env[68443]: DEBUG oslo_vmware.api [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]52fb6483-90d7-3c93-91e5-cd6fc12987f7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1901.651030] env[68443]: DEBUG nova.network.neutron [req-13d09249-4392-46fb-a521-b8d5d88f9702 req-14cf2d6e-0ee7-4a62-a120-356bcfb9fcdb service nova] [instance: 6333b256-471f-485d-b099-21fa82349319] Updated VIF entry in instance network info cache for port 68118aaa-0f46-4631-82c7-a437dc7deaeb. {{(pid=68443) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1901.651030] env[68443]: DEBUG nova.network.neutron [req-13d09249-4392-46fb-a521-b8d5d88f9702 req-14cf2d6e-0ee7-4a62-a120-356bcfb9fcdb service nova] [instance: 6333b256-471f-485d-b099-21fa82349319] Updating instance_info_cache with network_info: [{"id": "68118aaa-0f46-4631-82c7-a437dc7deaeb", "address": "fa:16:3e:ba:db:6c", "network": {"id": "ef236b73-c299-4b78-ba68-0be63802a04b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1138155627-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d0592ea4b3c49698b73391ae2be0ad8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afae6acd-1873-4228-9d5a-1cd5d4efe3e4", "external-id": "nsx-vlan-transportzone-183", "segmentation_id": 183, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap68118aaa-0f", "ovs_interfaceid": "68118aaa-0f46-4631-82c7-a437dc7deaeb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1901.660147] env[68443]: DEBUG oslo_concurrency.lockutils [req-13d09249-4392-46fb-a521-b8d5d88f9702 req-14cf2d6e-0ee7-4a62-a120-356bcfb9fcdb service nova] Releasing lock "refresh_cache-6333b256-471f-485d-b099-21fa82349319" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1902.134663] env[68443]: DEBUG oslo_concurrency.lockutils [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1902.135695] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6333b256-471f-485d-b099-21fa82349319] Processing image a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) _fetch_image_if_missing 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1902.135695] env[68443]: DEBUG oslo_concurrency.lockutils [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1929.347327] env[68443]: DEBUG oslo_concurrency.lockutils [None req-1a1a9584-8315-4259-8a60-d816b2f7fb9d tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Acquiring lock "6333b256-471f-485d-b099-21fa82349319" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1931.868511] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1932.825765] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1934.825746] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1934.844971] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1934.845298] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1934.845501] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1934.845756] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68443) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1934.846778] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7d95ef2-4276-439e-a529-2bdf980e5801 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.859030] env[68443]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-514ce43d-b447-479a-8c50-88651e5f514b {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.873431] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2615c841-324d-411b-8afc-632fe3a2f5b0 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.880025] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a442156e-43e8-4c32-986b-0612ee5eced4 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.909693] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180981MB free_disk=105GB free_vcpus=48 pci_devices=None {{(pid=68443) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1934.909853] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1934.910053] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1935.043819] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1935.043988] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 7366efe5-c640-4689-97a1-fba0ac431b12 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1935.044140] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 04fc458a-a928-43ef-8fd0-bfc49989d2b1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1935.044266] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 2985403d-348f-473d-ad1f-75fb67d3be12 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1935.044386] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 0a9bb99d-8f94-4f26-990e-a57aac09c328 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1935.044504] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance b8c2916e-3b70-42c9-9f85-ee8582c636b8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1935.044618] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 062710e8-2ccb-4926-97ce-bf6a9fa4d10c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1935.044731] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 963d7f65-a761-4ce1-b6d1-fc987c3111c0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1935.044844] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 12b39079-051e-4997-9fa1-7e467af04306 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1935.044955] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 6333b256-471f-485d-b099-21fa82349319 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1935.057847] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance a077b8cb-2ae3-4a4b-b309-ca8dbca0e412 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1935.069840] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 75ba0bb9-0498-4434-aed1-b03aa0bcaf03 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1935.082577] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance da00322f-5482-4511-94a4-2e2f3705fb99 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1935.082577] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1935.082577] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1935.255758] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08c6ee36-fe12-494b-bbd0-037a05625ac1 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.262927] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12a94990-949e-43cc-8f11-65980a8aec31 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.294504] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d61bc62-27af-4ac6-b33a-ec01ff45987f {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.302231] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-235ef6f8-302c-4561-8069-a8611a0d0ce0 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.315259] env[68443]: DEBUG nova.compute.provider_tree [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1935.323515] env[68443]: DEBUG nova.scheduler.client.report [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1935.342379] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68443) 
_update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1935.342601] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.433s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1936.343990] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1936.343990] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Starting heal instance info cache {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1936.343990] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Rebuilding the list of instances to heal {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1936.378391] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1936.378916] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1936.380672] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1936.380672] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1936.380672] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1936.380672] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1936.380672] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Skipping network cache update for instance because it is Building. 
{{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1936.380962] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1936.380962] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1936.380962] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 6333b256-471f-485d-b099-21fa82349319] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1936.380962] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Didn't find any instances for network info cache update. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1937.824452] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1938.825448] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1938.825819] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68443) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1939.825575] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1940.820424] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1943.824575] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1948.486438] env[68443]: WARNING oslo_vmware.rw_handles [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1948.486438] env[68443]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1948.486438] env[68443]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1948.486438] env[68443]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1948.486438] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1948.486438] env[68443]: ERROR oslo_vmware.rw_handles response.begin() [ 1948.486438] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1948.486438] env[68443]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1948.486438] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1948.486438] env[68443]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1948.486438] env[68443]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1948.486438] env[68443]: ERROR oslo_vmware.rw_handles [ 1948.487086] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Downloaded image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to vmware_temp/f15c30cf-3773-4154-ac05-3d40258ebd32/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1948.489835] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Caching image {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1948.490108] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc 
tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Copying Virtual Disk [datastore1] vmware_temp/f15c30cf-3773-4154-ac05-3d40258ebd32/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk to [datastore1] vmware_temp/f15c30cf-3773-4154-ac05-3d40258ebd32/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk {{(pid=68443) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1948.490400] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e954f1ba-2168-4190-8f40-95b4bcb2e52d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.498419] env[68443]: DEBUG oslo_vmware.api [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Waiting for the task: (returnval){ [ 1948.498419] env[68443]: value = "task-3374062" [ 1948.498419] env[68443]: _type = "Task" [ 1948.498419] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1948.506072] env[68443]: DEBUG oslo_vmware.api [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Task: {'id': task-3374062, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1949.009226] env[68443]: DEBUG oslo_vmware.exceptions [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Fault InvalidArgument not matched. 
{{(pid=68443) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1949.009512] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1949.010097] env[68443]: ERROR nova.compute.manager [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1949.010097] env[68443]: Faults: ['InvalidArgument'] [ 1949.010097] env[68443]: ERROR nova.compute.manager [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Traceback (most recent call last): [ 1949.010097] env[68443]: ERROR nova.compute.manager [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1949.010097] env[68443]: ERROR nova.compute.manager [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] yield resources [ 1949.010097] env[68443]: ERROR nova.compute.manager [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1949.010097] env[68443]: ERROR nova.compute.manager [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] self.driver.spawn(context, instance, image_meta, [ 1949.010097] env[68443]: ERROR nova.compute.manager [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1949.010097] env[68443]: ERROR nova.compute.manager [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1949.010097] env[68443]: ERROR nova.compute.manager [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1949.010097] env[68443]: ERROR nova.compute.manager [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] self._fetch_image_if_missing(context, vi) [ 1949.010097] env[68443]: ERROR nova.compute.manager [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1949.010545] env[68443]: ERROR nova.compute.manager [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] image_cache(vi, tmp_image_ds_loc) [ 1949.010545] env[68443]: ERROR nova.compute.manager [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1949.010545] env[68443]: ERROR nova.compute.manager [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] vm_util.copy_virtual_disk( [ 1949.010545] env[68443]: ERROR nova.compute.manager [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1949.010545] env[68443]: ERROR nova.compute.manager [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] session._wait_for_task(vmdk_copy_task) [ 1949.010545] env[68443]: ERROR nova.compute.manager [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1949.010545] env[68443]: ERROR nova.compute.manager [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] return self.wait_for_task(task_ref) [ 1949.010545] env[68443]: ERROR nova.compute.manager [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1949.010545] env[68443]: ERROR nova.compute.manager [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] return evt.wait() [ 1949.010545] env[68443]: ERROR nova.compute.manager [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1949.010545] env[68443]: ERROR nova.compute.manager [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] result = hub.switch() [ 1949.010545] env[68443]: ERROR nova.compute.manager [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1949.010545] env[68443]: ERROR nova.compute.manager [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] return self.greenlet.switch() [ 1949.011015] env[68443]: ERROR nova.compute.manager [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1949.011015] env[68443]: ERROR nova.compute.manager [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] self.f(*self.args, **self.kw) [ 1949.011015] env[68443]: ERROR nova.compute.manager [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1949.011015] env[68443]: ERROR nova.compute.manager [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] raise exceptions.translate_fault(task_info.error) [ 1949.011015] env[68443]: ERROR nova.compute.manager [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1949.011015] env[68443]: ERROR nova.compute.manager [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Faults: ['InvalidArgument'] [ 1949.011015] env[68443]: ERROR nova.compute.manager [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] [ 1949.011015] env[68443]: INFO nova.compute.manager [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Terminating instance [ 1949.011993] env[68443]: DEBUG oslo_concurrency.lockutils [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1949.012222] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1949.012455] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-d589f4b6-f87c-4e87-a87e-62bf3cad198e {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.014523] env[68443]: DEBUG nova.compute.manager [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1949.014733] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1949.015408] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61164319-93d5-49a7-8f4a-e4dcfd3c423d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.022104] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Unregistering the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1949.022320] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f176b457-c6c1-49fc-abf5-0432f7fa757d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.024331] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1949.024501] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68443) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1949.025389] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ed148295-08bf-4a6c-bc99-6ae86321b22d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.029771] env[68443]: DEBUG oslo_vmware.api [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Waiting for the task: (returnval){ [ 1949.029771] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]52a5512b-3339-a630-da65-0d0b71ddee6e" [ 1949.029771] env[68443]: _type = "Task" [ 1949.029771] env[68443]: } to complete. 
{{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1949.036661] env[68443]: DEBUG oslo_vmware.api [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]52a5512b-3339-a630-da65-0d0b71ddee6e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1949.097382] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Unregistered the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1949.097646] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Deleting contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1949.097846] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Deleting the datastore file [datastore1] 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1949.098124] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-73e45cb5-c250-4e96-ad63-73b30841a103 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.103665] env[68443]: DEBUG oslo_vmware.api [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Waiting for the task: (returnval){ [ 1949.103665] env[68443]: value = "task-3374064" [ 1949.103665] env[68443]: _type = "Task" [ 1949.103665] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1949.110842] env[68443]: DEBUG oslo_vmware.api [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Task: {'id': task-3374064, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1949.539557] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Preparing fetch location {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1949.539935] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Creating directory with path [datastore1] vmware_temp/6e0283f4-724b-4686-b5cb-a6dbbe46f981/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1949.540098] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fd36d504-7545-4246-a34c-de8d324d9d56 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.551483] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Created directory with path [datastore1] vmware_temp/6e0283f4-724b-4686-b5cb-a6dbbe46f981/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1949.551684] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Fetch image to [datastore1] vmware_temp/6e0283f4-724b-4686-b5cb-a6dbbe46f981/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1949.551919] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to [datastore1] vmware_temp/6e0283f4-724b-4686-b5cb-a6dbbe46f981/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1949.552705] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66467588-3994-42ca-b502-d30fc2cd4f42 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.559241] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c96c8f45-752b-40b8-a291-e5dd5d021622 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.568436] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9505a638-82a6-4995-9fd7-88ea414fcfdb {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.599956] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-577a6d7d-5926-483d-965d-82b4f5a20e82 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.608213] env[68443]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-81fccb9c-811c-4b28-8a46-3ec90c910493 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.614149] env[68443]: DEBUG oslo_vmware.api [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Task: {'id': task-3374064, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.063313} completed successfully. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1949.614392] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Deleted the datastore file {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1949.614593] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Deleted contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1949.614740] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1949.614913] env[68443]: INFO nova.compute.manager [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1949.617042] env[68443]: DEBUG nova.compute.claims [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Aborting claim: {{(pid=68443) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1949.617223] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1949.617438] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1949.630998] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1949.779443] env[68443]: DEBUG oslo_vmware.rw_handles [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6e0283f4-724b-4686-b5cb-a6dbbe46f981/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68443) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1949.842460] env[68443]: DEBUG oslo_vmware.rw_handles [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Completed reading data from the image iterator. {{(pid=68443) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1949.842648] env[68443]: DEBUG oslo_vmware.rw_handles [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6e0283f4-724b-4686-b5cb-a6dbbe46f981/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68443) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1949.929898] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4c6d963-ed0b-4c5c-a156-3e077fcb57b8 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.936512] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9fede97-ead6-4282-a6cd-2901a23a58d4 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.965383] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-898c1ad1-4cfe-420c-ab92-3798bae6ee0c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.972320] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f29d1aa6-2add-4c6f-a3a8-f8ff719426e4 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.984655] env[68443]: DEBUG nova.compute.provider_tree [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1949.993506] env[68443]: DEBUG nova.scheduler.client.report [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1950.012775] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.395s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1950.013344] env[68443]: ERROR nova.compute.manager [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1950.013344] env[68443]: Faults: ['InvalidArgument'] [ 1950.013344] env[68443]: ERROR nova.compute.manager [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Traceback (most recent call last): [ 1950.013344] env[68443]: ERROR nova.compute.manager [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1950.013344] 
env[68443]: ERROR nova.compute.manager [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] self.driver.spawn(context, instance, image_meta, [ 1950.013344] env[68443]: ERROR nova.compute.manager [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1950.013344] env[68443]: ERROR nova.compute.manager [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1950.013344] env[68443]: ERROR nova.compute.manager [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1950.013344] env[68443]: ERROR nova.compute.manager [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] self._fetch_image_if_missing(context, vi) [ 1950.013344] env[68443]: ERROR nova.compute.manager [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1950.013344] env[68443]: ERROR nova.compute.manager [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] image_cache(vi, tmp_image_ds_loc) [ 1950.013344] env[68443]: ERROR nova.compute.manager [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1950.014110] env[68443]: ERROR nova.compute.manager [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] vm_util.copy_virtual_disk( [ 1950.014110] env[68443]: ERROR nova.compute.manager [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1950.014110] env[68443]: ERROR nova.compute.manager [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] session._wait_for_task(vmdk_copy_task) [ 1950.014110] env[68443]: ERROR nova.compute.manager [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1950.014110] env[68443]: ERROR nova.compute.manager [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] return self.wait_for_task(task_ref) [ 1950.014110] env[68443]: ERROR nova.compute.manager [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1950.014110] env[68443]: ERROR nova.compute.manager [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] return evt.wait() [ 1950.014110] env[68443]: ERROR nova.compute.manager [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1950.014110] env[68443]: ERROR nova.compute.manager [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] result = hub.switch() [ 1950.014110] env[68443]: ERROR nova.compute.manager [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1950.014110] env[68443]: ERROR nova.compute.manager [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] return self.greenlet.switch() [ 1950.014110] env[68443]: ERROR nova.compute.manager [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1950.014110] env[68443]: ERROR nova.compute.manager [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] self.f(*self.args, **self.kw) [ 1950.015346] env[68443]: ERROR nova.compute.manager [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1950.015346] env[68443]: ERROR nova.compute.manager [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] raise exceptions.translate_fault(task_info.error) [ 1950.015346] env[68443]: ERROR nova.compute.manager [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1950.015346] env[68443]: ERROR nova.compute.manager [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Faults: ['InvalidArgument'] [ 1950.015346] env[68443]: ERROR nova.compute.manager [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] [ 1950.015346] env[68443]: DEBUG nova.compute.utils [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] VimFaultException {{(pid=68443) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1950.015693] env[68443]: DEBUG nova.compute.manager [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Build of instance 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba was re-scheduled: A specified parameter was not correct: fileType [ 1950.015693] env[68443]: Faults: ['InvalidArgument'] {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1950.015959] env[68443]: DEBUG nova.compute.manager [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Unplugging VIFs for instance {{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1950.016145] env[68443]: DEBUG nova.compute.manager [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1950.016483] env[68443]: DEBUG nova.compute.manager [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1950.016645] env[68443]: DEBUG nova.network.neutron [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1950.530481] env[68443]: DEBUG nova.network.neutron [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1950.550720] env[68443]: INFO nova.compute.manager [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Took 0.53 seconds to deallocate network for instance. [ 1950.659698] env[68443]: INFO nova.scheduler.client.report [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Deleted allocations for instance 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba [ 1950.681090] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a03a72cc-bf61-43b3-8f5a-088f7cfac6cc tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Lock "0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 611.236s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1950.682313] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4349e706-87bf-4150-a6e7-4d993a7a57d2 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Lock "0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 414.453s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1950.682486] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4349e706-87bf-4150-a6e7-4d993a7a57d2 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Acquiring lock "0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1950.682689] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4349e706-87bf-4150-a6e7-4d993a7a57d2 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Lock "0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1950.683398] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4349e706-87bf-4150-a6e7-4d993a7a57d2 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Lock "0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1950.686298] env[68443]: INFO nova.compute.manager [None req-4349e706-87bf-4150-a6e7-4d993a7a57d2 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Terminating instance [ 1950.688342] env[68443]: DEBUG nova.compute.manager [None req-4349e706-87bf-4150-a6e7-4d993a7a57d2 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1950.688584] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-4349e706-87bf-4150-a6e7-4d993a7a57d2 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1950.688863] env[68443]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5dc4b16d-a5d6-4a73-b98f-b25f0a9bc495 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.693016] env[68443]: DEBUG nova.compute.manager [None req-38161715-85ae-4ca4-a0e2-94d39f38aae1 tempest-ServerTagsTestJSON-268279256 tempest-ServerTagsTestJSON-268279256-project-member] [instance: a81ad5e2-d4bb-4ef0-a268-c7012538821d] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1950.699165] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fc06d0c-2b37-4061-91f8-e3a8cc21d9b6 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.719751] env[68443]: DEBUG nova.compute.manager [None req-38161715-85ae-4ca4-a0e2-94d39f38aae1 tempest-ServerTagsTestJSON-268279256 tempest-ServerTagsTestJSON-268279256-project-member] [instance: a81ad5e2-d4bb-4ef0-a268-c7012538821d] Instance disappeared before build. {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1950.730882] env[68443]: WARNING nova.virt.vmwareapi.vmops [None req-4349e706-87bf-4150-a6e7-4d993a7a57d2 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba could not be found. 
[ 1950.731098] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-4349e706-87bf-4150-a6e7-4d993a7a57d2 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1950.731281] env[68443]: INFO nova.compute.manager [None req-4349e706-87bf-4150-a6e7-4d993a7a57d2 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1950.731531] env[68443]: DEBUG oslo.service.loopingcall [None req-4349e706-87bf-4150-a6e7-4d993a7a57d2 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1950.731958] env[68443]: DEBUG nova.compute.manager [-] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1950.732074] env[68443]: DEBUG nova.network.neutron [-] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1950.748472] env[68443]: DEBUG oslo_concurrency.lockutils [None req-38161715-85ae-4ca4-a0e2-94d39f38aae1 tempest-ServerTagsTestJSON-268279256 tempest-ServerTagsTestJSON-268279256-project-member] Lock "a81ad5e2-d4bb-4ef0-a268-c7012538821d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 214.322s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1950.756408] env[68443]: DEBUG nova.network.neutron [-] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1950.762906] env[68443]: DEBUG nova.compute.manager [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1950.765911] env[68443]: INFO nova.compute.manager [-] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] Took 0.03 seconds to deallocate network for instance. 
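Annotation (not part of the log): the "Waiting for function ... _deallocate_network_with_retries to return." line comes from oslo.service's looping-call machinery, which Nova uses here to retry network deallocation until it finishes. A minimal usage sketch of that family of helpers follows; the poll function, counter and interval are assumptions for illustration, not Nova's actual retry logic.

    # Minimal FixedIntervalLoopingCall usage: the wrapped function is called
    # every `interval` seconds until it raises LoopingCallDone, whose return
    # value is what .wait() gives back to the caller.
    from oslo_service import loopingcall

    attempts = {'n': 0}

    def _poll():
        attempts['n'] += 1
        if attempts['n'] >= 3:                       # pretend the work is done
            raise loopingcall.LoopingCallDone(True)  # stops the loop

    timer = loopingcall.FixedIntervalLoopingCall(_poll)
    result = timer.start(interval=0.5).wait()        # blocks until done, True here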
[ 1950.815826] env[68443]: DEBUG oslo_concurrency.lockutils [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1950.816199] env[68443]: DEBUG oslo_concurrency.lockutils [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1950.817631] env[68443]: INFO nova.compute.claims [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1950.854031] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4349e706-87bf-4150-a6e7-4d993a7a57d2 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Lock "0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.172s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1950.855083] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 56.774s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1950.855338] env[68443]: INFO nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba] During sync_power_state the instance has a pending task (deleting). Skip. 
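Annotation (not part of the log): the repeated 'Acquiring lock ... / acquired ... waited Ns / released ... held Ns' lines (here around the "compute_resources" lock taken for instance_claim) are emitted by oslo.concurrency's lockutils. A hedged sketch of that pattern is below; the lock name matches the log, while the guarded function body and resource dictionaries are assumptions.

    # Named-lock pattern from oslo.concurrency: every caller serializes on the
    # same lock name, which is why the log records per-caller wait/held times.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def instance_claim(requested, free):
        # All resource-tracker style mutations happen under one named lock.
        return all(free[k] >= v for k, v in requested.items())

    # Equivalent context-manager form:
    with lockutils.lock('compute_resources'):
        pass  # critical section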
[ 1950.855550] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "0bb3bf54-8b3a-44e6-99f1-f0efcf8684ba" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1951.011686] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aa06fc3-718d-4c84-b979-0196c43ac67f {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.019078] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2091dd67-93c5-4c86-a50c-1da500248bf9 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.049278] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57e5b7df-6955-49a3-b5f2-556d9ad68ca1 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.056810] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-640fd04c-8bae-4e89-99d6-c079d3c9f019 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.069310] env[68443]: DEBUG nova.compute.provider_tree [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1951.097217] env[68443]: DEBUG nova.scheduler.client.report [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1951.112531] env[68443]: DEBUG oslo_concurrency.lockutils [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.296s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1951.112976] env[68443]: DEBUG nova.compute.manager [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Start building networks asynchronously for instance. 
{{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1951.144421] env[68443]: DEBUG nova.compute.utils [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Using /dev/sd instead of None {{(pid=68443) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1951.145846] env[68443]: DEBUG nova.compute.manager [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Allocating IP information in the background. {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1951.146025] env[68443]: DEBUG nova.network.neutron [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] allocate_for_instance() {{(pid=68443) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1951.155278] env[68443]: DEBUG nova.compute.manager [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Start building block device mappings for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1951.208616] env[68443]: DEBUG nova.policy [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8eb39994728c486ab572c6fd7acd1bb1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b9d22d78a3f8410c858ba3f85fb453c3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68443) authorize /opt/stack/nova/nova/policy.py:203}} [ 1951.219609] env[68443]: DEBUG nova.compute.manager [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Start spawning the instance on the hypervisor. 
{{(pid=68443) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1951.245362] env[68443]: DEBUG nova.virt.hardware [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-08T18:48:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-08T18:48:12Z,direct_url=,disk_format='vmdk',id=a80a8b97-4d56-4702-9b02-9d115fcd6710,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f4f2a1e220914ec3b281775c224df247',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-08T18:48:13Z,virtual_size=,visibility=), allow threads: False {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1951.245609] env[68443]: DEBUG nova.virt.hardware [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Flavor limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1951.245765] env[68443]: DEBUG nova.virt.hardware [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Image limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1951.245946] env[68443]: DEBUG nova.virt.hardware [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Flavor pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1951.246168] env[68443]: DEBUG nova.virt.hardware [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Image pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1951.246257] env[68443]: DEBUG nova.virt.hardware [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1951.246461] env[68443]: DEBUG nova.virt.hardware [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1951.246625] env[68443]: DEBUG nova.virt.hardware [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1951.246790] 
env[68443]: DEBUG nova.virt.hardware [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Got 1 possible topologies {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1951.246969] env[68443]: DEBUG nova.virt.hardware [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1951.247160] env[68443]: DEBUG nova.virt.hardware [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1951.248018] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd278d28-84bc-4dee-84ca-6e30ab26d505 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.256031] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78e34678-5947-4d3f-893d-25abd3b8942c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.516057] env[68443]: DEBUG nova.network.neutron [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Successfully created port: c1434313-c90c-436a-ac86-3746326d1605 {{(pid=68443) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1952.098657] env[68443]: DEBUG nova.network.neutron [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Successfully updated port: c1434313-c90c-436a-ac86-3746326d1605 {{(pid=68443) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1952.109266] env[68443]: DEBUG oslo_concurrency.lockutils [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Acquiring lock "refresh_cache-a077b8cb-2ae3-4a4b-b309-ca8dbca0e412" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1952.109428] env[68443]: DEBUG oslo_concurrency.lockutils [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Acquired lock "refresh_cache-a077b8cb-2ae3-4a4b-b309-ca8dbca0e412" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1952.109551] env[68443]: DEBUG nova.network.neutron [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Building network info cache for instance {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1952.151747] env[68443]: DEBUG 
nova.network.neutron [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Instance cache missing network info. {{(pid=68443) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1952.308788] env[68443]: DEBUG nova.network.neutron [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Updating instance_info_cache with network_info: [{"id": "c1434313-c90c-436a-ac86-3746326d1605", "address": "fa:16:3e:50:58:4d", "network": {"id": "ffd7317a-996b-48eb-8ed3-08829bc8b488", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1411254965-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b9d22d78a3f8410c858ba3f85fb453c3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f4399275-8e92-4448-be9e-d4984e93e89c", "external-id": "nsx-vlan-transportzone-192", "segmentation_id": 192, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1434313-c9", "ovs_interfaceid": "c1434313-c90c-436a-ac86-3746326d1605", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1952.321401] env[68443]: DEBUG oslo_concurrency.lockutils [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Releasing lock "refresh_cache-a077b8cb-2ae3-4a4b-b309-ca8dbca0e412" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1952.321698] env[68443]: DEBUG nova.compute.manager [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Instance network_info: |[{"id": "c1434313-c90c-436a-ac86-3746326d1605", "address": "fa:16:3e:50:58:4d", "network": {"id": "ffd7317a-996b-48eb-8ed3-08829bc8b488", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1411254965-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b9d22d78a3f8410c858ba3f85fb453c3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f4399275-8e92-4448-be9e-d4984e93e89c", "external-id": "nsx-vlan-transportzone-192", "segmentation_id": 192, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1434313-c9", 
"ovs_interfaceid": "c1434313-c90c-436a-ac86-3746326d1605", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1952.322112] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:50:58:4d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f4399275-8e92-4448-be9e-d4984e93e89c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c1434313-c90c-436a-ac86-3746326d1605', 'vif_model': 'vmxnet3'}] {{(pid=68443) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1952.329558] env[68443]: DEBUG oslo.service.loopingcall [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1952.330070] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Creating VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1952.330297] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-029858c9-9944-4569-983e-958004b41f8d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.350102] env[68443]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1952.350102] env[68443]: value = "task-3374065" [ 1952.350102] env[68443]: _type = "Task" [ 1952.350102] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1952.357622] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3374065, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1952.556596] env[68443]: DEBUG nova.compute.manager [req-a68a1073-3024-4166-9fc2-5cf4ef19608a req-381b0107-7b6f-4221-b53a-ca6c11bceb55 service nova] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Received event network-vif-plugged-c1434313-c90c-436a-ac86-3746326d1605 {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1952.556864] env[68443]: DEBUG oslo_concurrency.lockutils [req-a68a1073-3024-4166-9fc2-5cf4ef19608a req-381b0107-7b6f-4221-b53a-ca6c11bceb55 service nova] Acquiring lock "a077b8cb-2ae3-4a4b-b309-ca8dbca0e412-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1952.557147] env[68443]: DEBUG oslo_concurrency.lockutils [req-a68a1073-3024-4166-9fc2-5cf4ef19608a req-381b0107-7b6f-4221-b53a-ca6c11bceb55 service nova] Lock "a077b8cb-2ae3-4a4b-b309-ca8dbca0e412-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1952.557303] env[68443]: DEBUG oslo_concurrency.lockutils [req-a68a1073-3024-4166-9fc2-5cf4ef19608a req-381b0107-7b6f-4221-b53a-ca6c11bceb55 service nova] Lock "a077b8cb-2ae3-4a4b-b309-ca8dbca0e412-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1952.558034] env[68443]: DEBUG nova.compute.manager [req-a68a1073-3024-4166-9fc2-5cf4ef19608a req-381b0107-7b6f-4221-b53a-ca6c11bceb55 service nova] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] No waiting events found dispatching network-vif-plugged-c1434313-c90c-436a-ac86-3746326d1605 {{(pid=68443) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1952.558034] env[68443]: WARNING nova.compute.manager [req-a68a1073-3024-4166-9fc2-5cf4ef19608a req-381b0107-7b6f-4221-b53a-ca6c11bceb55 service nova] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Received unexpected event network-vif-plugged-c1434313-c90c-436a-ac86-3746326d1605 for instance with vm_state building and task_state spawning. [ 1952.558034] env[68443]: DEBUG nova.compute.manager [req-a68a1073-3024-4166-9fc2-5cf4ef19608a req-381b0107-7b6f-4221-b53a-ca6c11bceb55 service nova] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Received event network-changed-c1434313-c90c-436a-ac86-3746326d1605 {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1952.558220] env[68443]: DEBUG nova.compute.manager [req-a68a1073-3024-4166-9fc2-5cf4ef19608a req-381b0107-7b6f-4221-b53a-ca6c11bceb55 service nova] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Refreshing instance network info cache due to event network-changed-c1434313-c90c-436a-ac86-3746326d1605. 
{{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1952.558381] env[68443]: DEBUG oslo_concurrency.lockutils [req-a68a1073-3024-4166-9fc2-5cf4ef19608a req-381b0107-7b6f-4221-b53a-ca6c11bceb55 service nova] Acquiring lock "refresh_cache-a077b8cb-2ae3-4a4b-b309-ca8dbca0e412" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1952.558522] env[68443]: DEBUG oslo_concurrency.lockutils [req-a68a1073-3024-4166-9fc2-5cf4ef19608a req-381b0107-7b6f-4221-b53a-ca6c11bceb55 service nova] Acquired lock "refresh_cache-a077b8cb-2ae3-4a4b-b309-ca8dbca0e412" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1952.558687] env[68443]: DEBUG nova.network.neutron [req-a68a1073-3024-4166-9fc2-5cf4ef19608a req-381b0107-7b6f-4221-b53a-ca6c11bceb55 service nova] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Refreshing network info cache for port c1434313-c90c-436a-ac86-3746326d1605 {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1952.824533] env[68443]: DEBUG nova.network.neutron [req-a68a1073-3024-4166-9fc2-5cf4ef19608a req-381b0107-7b6f-4221-b53a-ca6c11bceb55 service nova] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Updated VIF entry in instance network info cache for port c1434313-c90c-436a-ac86-3746326d1605. {{(pid=68443) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1952.824902] env[68443]: DEBUG nova.network.neutron [req-a68a1073-3024-4166-9fc2-5cf4ef19608a req-381b0107-7b6f-4221-b53a-ca6c11bceb55 service nova] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Updating instance_info_cache with network_info: [{"id": "c1434313-c90c-436a-ac86-3746326d1605", "address": "fa:16:3e:50:58:4d", "network": {"id": "ffd7317a-996b-48eb-8ed3-08829bc8b488", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1411254965-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b9d22d78a3f8410c858ba3f85fb453c3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f4399275-8e92-4448-be9e-d4984e93e89c", "external-id": "nsx-vlan-transportzone-192", "segmentation_id": 192, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1434313-c9", "ovs_interfaceid": "c1434313-c90c-436a-ac86-3746326d1605", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1952.834075] env[68443]: DEBUG oslo_concurrency.lockutils [req-a68a1073-3024-4166-9fc2-5cf4ef19608a req-381b0107-7b6f-4221-b53a-ca6c11bceb55 service nova] Releasing lock "refresh_cache-a077b8cb-2ae3-4a4b-b309-ca8dbca0e412" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1952.859964] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3374065, 'name': CreateVM_Task, 'duration_secs': 0.324792} completed successfully. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1952.860150] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Created VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1952.860818] env[68443]: DEBUG oslo_concurrency.lockutils [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1952.862572] env[68443]: DEBUG oslo_concurrency.lockutils [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1952.862572] env[68443]: DEBUG oslo_concurrency.lockutils [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1952.862572] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7151a783-b26f-4bc1-a1a7-568a8c4afda7 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.866081] env[68443]: DEBUG oslo_vmware.api [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Waiting for the task: (returnval){ [ 1952.866081] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]524e37c9-27d6-49aa-2824-b017dd4560c0" [ 1952.866081] env[68443]: _type = "Task" [ 1952.866081] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1952.875659] env[68443]: DEBUG oslo_vmware.api [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]524e37c9-27d6-49aa-2824-b017dd4560c0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1953.376410] env[68443]: DEBUG oslo_concurrency.lockutils [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1953.376811] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Processing image a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1953.376872] env[68443]: DEBUG oslo_concurrency.lockutils [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1991.824945] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1993.826845] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1994.825436] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1994.837330] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1994.837630] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1994.837739] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1994.837898] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68443) 
update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1994.839202] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dab9c85f-9ba2-4e42-bb3f-2245b7c2e308 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.848296] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7758ab0f-276d-49ad-adf5-095f7cae7521 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.863227] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec3eac2f-8f77-405d-97be-8130a1421d73 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.869229] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c372a25-c43c-4dd8-bac9-8b709cf69e2f {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.897588] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180986MB free_disk=105GB free_vcpus=48 pci_devices=None {{(pid=68443) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1994.897741] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1994.897927] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1994.976577] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 7366efe5-c640-4689-97a1-fba0ac431b12 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1994.976750] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 04fc458a-a928-43ef-8fd0-bfc49989d2b1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1994.976857] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 2985403d-348f-473d-ad1f-75fb67d3be12 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1994.976981] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 0a9bb99d-8f94-4f26-990e-a57aac09c328 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1994.977217] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance b8c2916e-3b70-42c9-9f85-ee8582c636b8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1994.977217] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 062710e8-2ccb-4926-97ce-bf6a9fa4d10c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1994.977331] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 963d7f65-a761-4ce1-b6d1-fc987c3111c0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1994.977451] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 12b39079-051e-4997-9fa1-7e467af04306 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1994.977562] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 6333b256-471f-485d-b099-21fa82349319 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1994.977674] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance a077b8cb-2ae3-4a4b-b309-ca8dbca0e412 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1995.006829] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 75ba0bb9-0498-4434-aed1-b03aa0bcaf03 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1995.017624] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance da00322f-5482-4511-94a4-2e2f3705fb99 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1995.017900] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1995.018080] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1995.153537] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d817cb7a-64de-4f2e-9a7c-79ff2bc4d611 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1995.161470] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36e53718-e28c-459e-9a0c-de9f7c397c72 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1995.193923] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91aca037-6d1d-431e-9836-7c056dc5c793 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1995.201235] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b5459c8-55ba-45a0-91de-1493a46dc13a {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1995.214651] env[68443]: DEBUG nova.compute.provider_tree [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1995.223048] env[68443]: DEBUG nova.scheduler.client.report [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1995.237194] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68443) 
_update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1995.237368] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.339s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1996.236610] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1996.236972] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Starting heal instance info cache {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1996.236972] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Rebuilding the list of instances to heal {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1996.256469] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1996.256690] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1996.256769] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1996.256894] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1996.257053] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1996.257181] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1996.257300] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Skipping network cache update for instance because it is Building. 
{{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1996.257419] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1996.257535] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 6333b256-471f-485d-b099-21fa82349319] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1996.257649] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1996.257762] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Didn't find any instances for network info cache update. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1997.825137] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1998.648470] env[68443]: DEBUG oslo_concurrency.lockutils [None req-48f41597-e519-47d8-9c2d-2961aa35fdc5 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Acquiring lock "a077b8cb-2ae3-4a4b-b309-ca8dbca0e412" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1998.824554] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1998.824704] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68443) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1999.082725] env[68443]: WARNING oslo_vmware.rw_handles [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1999.082725] env[68443]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1999.082725] env[68443]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1999.082725] env[68443]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1999.082725] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1999.082725] env[68443]: ERROR oslo_vmware.rw_handles response.begin() [ 1999.082725] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1999.082725] env[68443]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1999.082725] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1999.082725] env[68443]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1999.082725] env[68443]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1999.082725] env[68443]: ERROR oslo_vmware.rw_handles [ 1999.083656] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Downloaded image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to vmware_temp/6e0283f4-724b-4686-b5cb-a6dbbe46f981/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1999.085962] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Caching image {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1999.086220] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Copying Virtual Disk [datastore1] vmware_temp/6e0283f4-724b-4686-b5cb-a6dbbe46f981/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk to [datastore1] vmware_temp/6e0283f4-724b-4686-b5cb-a6dbbe46f981/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk {{(pid=68443) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1999.086503] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-077d5338-3f88-420d-ba39-ef3367d1e69c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.094512] env[68443]: DEBUG oslo_vmware.api [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Waiting 
for the task: (returnval){ [ 1999.094512] env[68443]: value = "task-3374066" [ 1999.094512] env[68443]: _type = "Task" [ 1999.094512] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1999.101964] env[68443]: DEBUG oslo_vmware.api [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Task: {'id': task-3374066, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1999.605177] env[68443]: DEBUG oslo_vmware.exceptions [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Fault InvalidArgument not matched. {{(pid=68443) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1999.605480] env[68443]: DEBUG oslo_concurrency.lockutils [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1999.606039] env[68443]: ERROR nova.compute.manager [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1999.606039] env[68443]: Faults: ['InvalidArgument'] [ 1999.606039] env[68443]: ERROR nova.compute.manager [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Traceback (most recent call last): [ 1999.606039] env[68443]: ERROR nova.compute.manager [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1999.606039] env[68443]: ERROR nova.compute.manager [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] yield resources [ 1999.606039] env[68443]: ERROR nova.compute.manager [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1999.606039] env[68443]: ERROR nova.compute.manager [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] self.driver.spawn(context, instance, image_meta, [ 1999.606039] env[68443]: ERROR nova.compute.manager [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1999.606039] env[68443]: ERROR nova.compute.manager [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1999.606039] env[68443]: ERROR nova.compute.manager [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1999.606039] env[68443]: ERROR nova.compute.manager [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] self._fetch_image_if_missing(context, vi) [ 1999.606039] env[68443]: ERROR nova.compute.manager [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1999.606627] env[68443]: ERROR nova.compute.manager 
[instance: 7366efe5-c640-4689-97a1-fba0ac431b12] image_cache(vi, tmp_image_ds_loc) [ 1999.606627] env[68443]: ERROR nova.compute.manager [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1999.606627] env[68443]: ERROR nova.compute.manager [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] vm_util.copy_virtual_disk( [ 1999.606627] env[68443]: ERROR nova.compute.manager [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1999.606627] env[68443]: ERROR nova.compute.manager [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] session._wait_for_task(vmdk_copy_task) [ 1999.606627] env[68443]: ERROR nova.compute.manager [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1999.606627] env[68443]: ERROR nova.compute.manager [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] return self.wait_for_task(task_ref) [ 1999.606627] env[68443]: ERROR nova.compute.manager [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1999.606627] env[68443]: ERROR nova.compute.manager [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] return evt.wait() [ 1999.606627] env[68443]: ERROR nova.compute.manager [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1999.606627] env[68443]: ERROR nova.compute.manager [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] result = hub.switch() [ 1999.606627] env[68443]: ERROR nova.compute.manager [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1999.606627] env[68443]: ERROR nova.compute.manager [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] return self.greenlet.switch() [ 1999.607028] env[68443]: ERROR nova.compute.manager [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1999.607028] env[68443]: ERROR nova.compute.manager [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] self.f(*self.args, **self.kw) [ 1999.607028] env[68443]: ERROR nova.compute.manager [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1999.607028] env[68443]: ERROR nova.compute.manager [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] raise exceptions.translate_fault(task_info.error) [ 1999.607028] env[68443]: ERROR nova.compute.manager [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1999.607028] env[68443]: ERROR nova.compute.manager [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Faults: ['InvalidArgument'] [ 1999.607028] env[68443]: ERROR nova.compute.manager [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] [ 1999.607028] env[68443]: INFO nova.compute.manager [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Terminating instance [ 1999.607967] env[68443]: DEBUG oslo_concurrency.lockutils [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 
tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1999.608203] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1999.608465] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-40531c87-3001-4a26-ab9f-2a5c51d7186c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.610826] env[68443]: DEBUG nova.compute.manager [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1999.611040] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1999.611824] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-979c4a23-21fc-4e2b-bf20-bd95aa30fb19 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.618880] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Unregistering the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1999.619115] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-70df6e41-ba7f-4164-bd8c-dfded13b089a {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.621401] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1999.621581] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68443) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1999.622594] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e5cbe2b-c0d2-4a90-898a-f66a3499109f {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.627494] env[68443]: DEBUG oslo_vmware.api [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Waiting for the task: (returnval){ [ 1999.627494] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]520db627-5574-06ef-76dc-3cca6f393c4c" [ 1999.627494] env[68443]: _type = "Task" [ 1999.627494] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1999.635855] env[68443]: DEBUG oslo_vmware.api [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]520db627-5574-06ef-76dc-3cca6f393c4c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1999.697812] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Unregistered the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1999.698079] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Deleting contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1999.698231] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Deleting the datastore file [datastore1] 7366efe5-c640-4689-97a1-fba0ac431b12 {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1999.698504] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0937c1bf-a827-4179-a8ff-a116ae3306f5 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.705434] env[68443]: DEBUG oslo_vmware.api [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Waiting for the task: (returnval){ [ 1999.705434] env[68443]: value = "task-3374068" [ 1999.705434] env[68443]: _type = "Task" [ 1999.705434] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1999.715455] env[68443]: DEBUG oslo_vmware.api [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Task: {'id': task-3374068, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2000.138472] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Preparing fetch location {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2000.138780] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Creating directory with path [datastore1] vmware_temp/b56c2e67-4dd5-4535-9076-9030d59e2ecc/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2000.138914] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7b0419fc-a40a-423a-b006-df03d631e141 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.149896] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Created directory with path [datastore1] vmware_temp/b56c2e67-4dd5-4535-9076-9030d59e2ecc/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2000.150094] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Fetch image to [datastore1] vmware_temp/b56c2e67-4dd5-4535-9076-9030d59e2ecc/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2000.150273] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to [datastore1] vmware_temp/b56c2e67-4dd5-4535-9076-9030d59e2ecc/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2000.150977] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf92dd19-dbc8-442c-b9a9-7453479ecc03 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.157132] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd8c5c37-fbf5-46dc-bd16-bb4cdeb25a62 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.165769] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-303fcddd-90c8-4694-bebc-858ff798440f {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.194923] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-32ca83b9-d423-424a-b67f-a957b75f456d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.200167] env[68443]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-fed51b85-5ea8-42e8-a07b-9a0b553aebaf {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.212905] env[68443]: DEBUG oslo_vmware.api [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Task: {'id': task-3374068, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.070483} completed successfully. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2000.213152] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Deleted the datastore file {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2000.213333] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Deleted contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2000.213507] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2000.213688] env[68443]: INFO nova.compute.manager [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2000.215776] env[68443]: DEBUG nova.compute.claims [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Aborting claim: {{(pid=68443) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2000.216167] env[68443]: DEBUG oslo_concurrency.lockutils [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2000.216167] env[68443]: DEBUG oslo_concurrency.lockutils [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2000.220857] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2000.272465] env[68443]: DEBUG oslo_vmware.rw_handles [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b56c2e67-4dd5-4535-9076-9030d59e2ecc/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68443) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2000.335341] env[68443]: DEBUG oslo_vmware.rw_handles [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Completed reading data from the image iterator. {{(pid=68443) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2000.335523] env[68443]: DEBUG oslo_vmware.rw_handles [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b56c2e67-4dd5-4535-9076-9030d59e2ecc/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68443) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2000.438774] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-188bfa44-295d-4c58-80c4-42192c647e8e {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.446135] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fe5cefa-68be-4a8c-8689-1a2a739cae21 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.476883] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8422c79-c06e-4e06-9d0b-1e1a7453223e {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.483754] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6a44f6d-c353-4489-a655-791a7545f7e4 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.496770] env[68443]: DEBUG nova.compute.provider_tree [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2000.504977] env[68443]: DEBUG nova.scheduler.client.report [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2000.520356] env[68443]: DEBUG oslo_concurrency.lockutils [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.304s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2000.520947] env[68443]: ERROR nova.compute.manager [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2000.520947] env[68443]: Faults: ['InvalidArgument'] [ 2000.520947] env[68443]: ERROR nova.compute.manager [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Traceback (most recent call last): [ 2000.520947] env[68443]: ERROR nova.compute.manager [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2000.520947] 
env[68443]: ERROR nova.compute.manager [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] self.driver.spawn(context, instance, image_meta, [ 2000.520947] env[68443]: ERROR nova.compute.manager [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2000.520947] env[68443]: ERROR nova.compute.manager [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2000.520947] env[68443]: ERROR nova.compute.manager [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2000.520947] env[68443]: ERROR nova.compute.manager [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] self._fetch_image_if_missing(context, vi) [ 2000.520947] env[68443]: ERROR nova.compute.manager [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2000.520947] env[68443]: ERROR nova.compute.manager [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] image_cache(vi, tmp_image_ds_loc) [ 2000.520947] env[68443]: ERROR nova.compute.manager [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2000.521376] env[68443]: ERROR nova.compute.manager [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] vm_util.copy_virtual_disk( [ 2000.521376] env[68443]: ERROR nova.compute.manager [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2000.521376] env[68443]: ERROR nova.compute.manager [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] session._wait_for_task(vmdk_copy_task) [ 2000.521376] env[68443]: ERROR nova.compute.manager [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2000.521376] env[68443]: ERROR nova.compute.manager [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] return self.wait_for_task(task_ref) [ 2000.521376] env[68443]: ERROR nova.compute.manager [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2000.521376] env[68443]: ERROR nova.compute.manager [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] return evt.wait() [ 2000.521376] env[68443]: ERROR nova.compute.manager [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2000.521376] env[68443]: ERROR nova.compute.manager [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] result = hub.switch() [ 2000.521376] env[68443]: ERROR nova.compute.manager [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2000.521376] env[68443]: ERROR nova.compute.manager [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] return self.greenlet.switch() [ 2000.521376] env[68443]: ERROR nova.compute.manager [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2000.521376] env[68443]: ERROR nova.compute.manager [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] self.f(*self.args, **self.kw) [ 2000.521778] env[68443]: ERROR nova.compute.manager [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2000.521778] env[68443]: ERROR nova.compute.manager [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] raise exceptions.translate_fault(task_info.error) [ 2000.521778] env[68443]: ERROR nova.compute.manager [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2000.521778] env[68443]: ERROR nova.compute.manager [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Faults: ['InvalidArgument'] [ 2000.521778] env[68443]: ERROR nova.compute.manager [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] [ 2000.521778] env[68443]: DEBUG nova.compute.utils [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] VimFaultException {{(pid=68443) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2000.523090] env[68443]: DEBUG nova.compute.manager [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Build of instance 7366efe5-c640-4689-97a1-fba0ac431b12 was re-scheduled: A specified parameter was not correct: fileType [ 2000.523090] env[68443]: Faults: ['InvalidArgument'] {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2000.523474] env[68443]: DEBUG nova.compute.manager [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Unplugging VIFs for instance {{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2000.523665] env[68443]: DEBUG nova.compute.manager [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2000.523858] env[68443]: DEBUG nova.compute.manager [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2000.524043] env[68443]: DEBUG nova.network.neutron [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2000.835279] env[68443]: DEBUG nova.network.neutron [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2000.845241] env[68443]: INFO nova.compute.manager [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Took 0.32 seconds to deallocate network for instance. [ 2000.934540] env[68443]: INFO nova.scheduler.client.report [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Deleted allocations for instance 7366efe5-c640-4689-97a1-fba0ac431b12 [ 2000.957100] env[68443]: DEBUG oslo_concurrency.lockutils [None req-e68ed9fd-62e7-45b1-9fa4-a0b729b3b43f tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Lock "7366efe5-c640-4689-97a1-fba0ac431b12" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 642.177s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2000.957878] env[68443]: DEBUG oslo_concurrency.lockutils [None req-6ad356a3-8b34-48aa-a724-763e158d9d7b tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Lock "7366efe5-c640-4689-97a1-fba0ac431b12" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 446.433s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2000.958176] env[68443]: DEBUG oslo_concurrency.lockutils [None req-6ad356a3-8b34-48aa-a724-763e158d9d7b tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Acquiring lock "7366efe5-c640-4689-97a1-fba0ac431b12-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2000.958301] env[68443]: DEBUG oslo_concurrency.lockutils [None req-6ad356a3-8b34-48aa-a724-763e158d9d7b tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Lock "7366efe5-c640-4689-97a1-fba0ac431b12-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2000.958537] env[68443]: DEBUG oslo_concurrency.lockutils [None req-6ad356a3-8b34-48aa-a724-763e158d9d7b tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Lock "7366efe5-c640-4689-97a1-fba0ac431b12-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2000.960962] env[68443]: INFO nova.compute.manager [None req-6ad356a3-8b34-48aa-a724-763e158d9d7b tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Terminating instance [ 2000.963019] env[68443]: DEBUG nova.compute.manager [None req-6ad356a3-8b34-48aa-a724-763e158d9d7b tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2000.963219] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-6ad356a3-8b34-48aa-a724-763e158d9d7b tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2000.963811] env[68443]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-566c7d9f-4880-4447-8b9f-b7015d4e2050 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.971603] env[68443]: DEBUG oslo_concurrency.lockutils [None req-9ed7767f-acef-4e90-8d86-1d492102e381 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Acquiring lock "062710e8-2ccb-4926-97ce-bf6a9fa4d10c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2000.976158] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2d3c489-0bc1-4abc-a2fe-c4a585c06344 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.986752] env[68443]: DEBUG nova.compute.manager [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2001.009607] env[68443]: WARNING nova.virt.vmwareapi.vmops [None req-6ad356a3-8b34-48aa-a724-763e158d9d7b tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7366efe5-c640-4689-97a1-fba0ac431b12 could not be found. 
[ 2001.009607] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-6ad356a3-8b34-48aa-a724-763e158d9d7b tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2001.009819] env[68443]: INFO nova.compute.manager [None req-6ad356a3-8b34-48aa-a724-763e158d9d7b tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Took 0.05 seconds to destroy the instance on the hypervisor. [ 2001.010083] env[68443]: DEBUG oslo.service.loopingcall [None req-6ad356a3-8b34-48aa-a724-763e158d9d7b tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2001.010378] env[68443]: DEBUG nova.compute.manager [-] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2001.010467] env[68443]: DEBUG nova.network.neutron [-] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2001.042452] env[68443]: DEBUG oslo_concurrency.lockutils [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2001.042709] env[68443]: DEBUG oslo_concurrency.lockutils [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2001.044313] env[68443]: INFO nova.compute.claims [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2001.081623] env[68443]: DEBUG nova.network.neutron [-] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2001.089736] env[68443]: INFO nova.compute.manager [-] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] Took 0.08 seconds to deallocate network for instance. 
[ 2001.174873] env[68443]: DEBUG oslo_concurrency.lockutils [None req-6ad356a3-8b34-48aa-a724-763e158d9d7b tempest-AttachVolumeShelveTestJSON-7531367 tempest-AttachVolumeShelveTestJSON-7531367-project-member] Lock "7366efe5-c640-4689-97a1-fba0ac431b12" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.217s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2001.175732] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "7366efe5-c640-4689-97a1-fba0ac431b12" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 107.094s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2001.175926] env[68443]: INFO nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 7366efe5-c640-4689-97a1-fba0ac431b12] During sync_power_state the instance has a pending task (deleting). Skip. [ 2001.176118] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "7366efe5-c640-4689-97a1-fba0ac431b12" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2001.220139] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e06b162-8cf3-435a-92af-8c080bd2f2b2 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.227511] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec33884e-61ef-41ec-8dea-3a1ce4eb1a9d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.257092] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba7289fd-c2f3-4419-bbaa-d6da9b890ff0 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.264034] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05ce1f99-a675-47c4-bdb3-03466959f32e {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.276759] env[68443]: DEBUG nova.compute.provider_tree [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2001.285682] env[68443]: DEBUG nova.scheduler.client.report [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 
105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2001.298897] env[68443]: DEBUG oslo_concurrency.lockutils [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.256s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2001.299510] env[68443]: DEBUG nova.compute.manager [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Start building networks asynchronously for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2001.331467] env[68443]: DEBUG nova.compute.utils [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Using /dev/sd instead of None {{(pid=68443) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2001.332869] env[68443]: DEBUG nova.compute.manager [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Allocating IP information in the background. {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2001.333074] env[68443]: DEBUG nova.network.neutron [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] allocate_for_instance() {{(pid=68443) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2001.341141] env[68443]: DEBUG nova.compute.manager [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Start building block device mappings for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2001.397382] env[68443]: DEBUG nova.policy [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5394eee9936641f986136eee619d6c2b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3d4692d4df3948b98eae443eebb5239b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68443) authorize /opt/stack/nova/nova/policy.py:203}} [ 2001.405196] env[68443]: DEBUG nova.compute.manager [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Start spawning the instance on the hypervisor. 
{{(pid=68443) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2001.429978] env[68443]: DEBUG nova.virt.hardware [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-08T18:48:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-08T18:48:12Z,direct_url=,disk_format='vmdk',id=a80a8b97-4d56-4702-9b02-9d115fcd6710,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f4f2a1e220914ec3b281775c224df247',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-08T18:48:13Z,virtual_size=,visibility=), allow threads: False {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2001.430255] env[68443]: DEBUG nova.virt.hardware [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Flavor limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2001.430412] env[68443]: DEBUG nova.virt.hardware [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Image limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2001.430594] env[68443]: DEBUG nova.virt.hardware [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Flavor pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2001.430738] env[68443]: DEBUG nova.virt.hardware [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Image pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2001.430886] env[68443]: DEBUG nova.virt.hardware [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2001.431104] env[68443]: DEBUG nova.virt.hardware [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2001.431265] env[68443]: DEBUG nova.virt.hardware [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2001.431426] env[68443]: DEBUG nova.virt.hardware [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 
tempest-ServersTestJSON-140288033-project-member] Got 1 possible topologies {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2001.431585] env[68443]: DEBUG nova.virt.hardware [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2001.431754] env[68443]: DEBUG nova.virt.hardware [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2001.432667] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fbccf28-55fa-4472-85e9-59ff61418b39 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.440273] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7f7e64a-d3cf-4be7-8fed-f00dbe70383a {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.694386] env[68443]: DEBUG nova.network.neutron [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Successfully created port: b229b3de-fe90-45d3-a333-b33ff95d57d6 {{(pid=68443) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2001.822382] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2001.824903] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2002.455626] env[68443]: DEBUG nova.network.neutron [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Successfully updated port: b229b3de-fe90-45d3-a333-b33ff95d57d6 {{(pid=68443) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2002.469275] env[68443]: DEBUG oslo_concurrency.lockutils [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Acquiring lock "refresh_cache-75ba0bb9-0498-4434-aed1-b03aa0bcaf03" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2002.469438] env[68443]: DEBUG oslo_concurrency.lockutils [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Acquired lock "refresh_cache-75ba0bb9-0498-4434-aed1-b03aa0bcaf03" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2002.469560] env[68443]: DEBUG nova.network.neutron [None 
req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Building network info cache for instance {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2002.506785] env[68443]: DEBUG nova.network.neutron [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Instance cache missing network info. {{(pid=68443) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2002.667729] env[68443]: DEBUG nova.network.neutron [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Updating instance_info_cache with network_info: [{"id": "b229b3de-fe90-45d3-a333-b33ff95d57d6", "address": "fa:16:3e:e9:c2:26", "network": {"id": "986011ca-8616-45ae-ad5c-074dc2c4637d", "bridge": "br-int", "label": "tempest-ServersTestJSON-393504262-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d4692d4df3948b98eae443eebb5239b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "095fbf26-7367-4f9e-87c5-2965b64b0b0f", "external-id": "nsx-vlan-transportzone-777", "segmentation_id": 777, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb229b3de-fe", "ovs_interfaceid": "b229b3de-fe90-45d3-a333-b33ff95d57d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2002.678395] env[68443]: DEBUG oslo_concurrency.lockutils [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Releasing lock "refresh_cache-75ba0bb9-0498-4434-aed1-b03aa0bcaf03" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2002.678679] env[68443]: DEBUG nova.compute.manager [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Instance network_info: |[{"id": "b229b3de-fe90-45d3-a333-b33ff95d57d6", "address": "fa:16:3e:e9:c2:26", "network": {"id": "986011ca-8616-45ae-ad5c-074dc2c4637d", "bridge": "br-int", "label": "tempest-ServersTestJSON-393504262-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d4692d4df3948b98eae443eebb5239b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": 
{"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "095fbf26-7367-4f9e-87c5-2965b64b0b0f", "external-id": "nsx-vlan-transportzone-777", "segmentation_id": 777, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb229b3de-fe", "ovs_interfaceid": "b229b3de-fe90-45d3-a333-b33ff95d57d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 2002.679083] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e9:c2:26', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '095fbf26-7367-4f9e-87c5-2965b64b0b0f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b229b3de-fe90-45d3-a333-b33ff95d57d6', 'vif_model': 'vmxnet3'}] {{(pid=68443) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2002.686839] env[68443]: DEBUG oslo.service.loopingcall [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2002.687312] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Creating VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2002.687543] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b1b71262-c65a-4f7d-8c3c-f3f7bdfb2f9c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.709056] env[68443]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2002.709056] env[68443]: value = "task-3374069" [ 2002.709056] env[68443]: _type = "Task" [ 2002.709056] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2002.715826] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3374069, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2002.895157] env[68443]: DEBUG nova.compute.manager [req-d7ad5f79-32dd-4b2c-897f-d685a829248a req-3886fae9-5f2a-4e47-a25f-f6e0b6d66c92 service nova] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Received event network-vif-plugged-b229b3de-fe90-45d3-a333-b33ff95d57d6 {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2002.895408] env[68443]: DEBUG oslo_concurrency.lockutils [req-d7ad5f79-32dd-4b2c-897f-d685a829248a req-3886fae9-5f2a-4e47-a25f-f6e0b6d66c92 service nova] Acquiring lock "75ba0bb9-0498-4434-aed1-b03aa0bcaf03-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2002.895667] env[68443]: DEBUG oslo_concurrency.lockutils [req-d7ad5f79-32dd-4b2c-897f-d685a829248a req-3886fae9-5f2a-4e47-a25f-f6e0b6d66c92 service nova] Lock "75ba0bb9-0498-4434-aed1-b03aa0bcaf03-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2002.895871] env[68443]: DEBUG oslo_concurrency.lockutils [req-d7ad5f79-32dd-4b2c-897f-d685a829248a req-3886fae9-5f2a-4e47-a25f-f6e0b6d66c92 service nova] Lock "75ba0bb9-0498-4434-aed1-b03aa0bcaf03-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2002.896252] env[68443]: DEBUG nova.compute.manager [req-d7ad5f79-32dd-4b2c-897f-d685a829248a req-3886fae9-5f2a-4e47-a25f-f6e0b6d66c92 service nova] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] No waiting events found dispatching network-vif-plugged-b229b3de-fe90-45d3-a333-b33ff95d57d6 {{(pid=68443) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2002.896467] env[68443]: WARNING nova.compute.manager [req-d7ad5f79-32dd-4b2c-897f-d685a829248a req-3886fae9-5f2a-4e47-a25f-f6e0b6d66c92 service nova] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Received unexpected event network-vif-plugged-b229b3de-fe90-45d3-a333-b33ff95d57d6 for instance with vm_state building and task_state spawning. [ 2002.896637] env[68443]: DEBUG nova.compute.manager [req-d7ad5f79-32dd-4b2c-897f-d685a829248a req-3886fae9-5f2a-4e47-a25f-f6e0b6d66c92 service nova] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Received event network-changed-b229b3de-fe90-45d3-a333-b33ff95d57d6 {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2002.896794] env[68443]: DEBUG nova.compute.manager [req-d7ad5f79-32dd-4b2c-897f-d685a829248a req-3886fae9-5f2a-4e47-a25f-f6e0b6d66c92 service nova] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Refreshing instance network info cache due to event network-changed-b229b3de-fe90-45d3-a333-b33ff95d57d6. 
{{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 2002.896975] env[68443]: DEBUG oslo_concurrency.lockutils [req-d7ad5f79-32dd-4b2c-897f-d685a829248a req-3886fae9-5f2a-4e47-a25f-f6e0b6d66c92 service nova] Acquiring lock "refresh_cache-75ba0bb9-0498-4434-aed1-b03aa0bcaf03" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2002.897167] env[68443]: DEBUG oslo_concurrency.lockutils [req-d7ad5f79-32dd-4b2c-897f-d685a829248a req-3886fae9-5f2a-4e47-a25f-f6e0b6d66c92 service nova] Acquired lock "refresh_cache-75ba0bb9-0498-4434-aed1-b03aa0bcaf03" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2002.897342] env[68443]: DEBUG nova.network.neutron [req-d7ad5f79-32dd-4b2c-897f-d685a829248a req-3886fae9-5f2a-4e47-a25f-f6e0b6d66c92 service nova] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Refreshing network info cache for port b229b3de-fe90-45d3-a333-b33ff95d57d6 {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2003.153507] env[68443]: DEBUG nova.network.neutron [req-d7ad5f79-32dd-4b2c-897f-d685a829248a req-3886fae9-5f2a-4e47-a25f-f6e0b6d66c92 service nova] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Updated VIF entry in instance network info cache for port b229b3de-fe90-45d3-a333-b33ff95d57d6. {{(pid=68443) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2003.153849] env[68443]: DEBUG nova.network.neutron [req-d7ad5f79-32dd-4b2c-897f-d685a829248a req-3886fae9-5f2a-4e47-a25f-f6e0b6d66c92 service nova] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Updating instance_info_cache with network_info: [{"id": "b229b3de-fe90-45d3-a333-b33ff95d57d6", "address": "fa:16:3e:e9:c2:26", "network": {"id": "986011ca-8616-45ae-ad5c-074dc2c4637d", "bridge": "br-int", "label": "tempest-ServersTestJSON-393504262-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d4692d4df3948b98eae443eebb5239b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "095fbf26-7367-4f9e-87c5-2965b64b0b0f", "external-id": "nsx-vlan-transportzone-777", "segmentation_id": 777, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb229b3de-fe", "ovs_interfaceid": "b229b3de-fe90-45d3-a333-b33ff95d57d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2003.164646] env[68443]: DEBUG oslo_concurrency.lockutils [req-d7ad5f79-32dd-4b2c-897f-d685a829248a req-3886fae9-5f2a-4e47-a25f-f6e0b6d66c92 service nova] Releasing lock "refresh_cache-75ba0bb9-0498-4434-aed1-b03aa0bcaf03" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2003.217417] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3374069, 'name': CreateVM_Task, 'duration_secs': 0.274705} completed successfully. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2003.217578] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Created VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2003.224331] env[68443]: DEBUG oslo_concurrency.lockutils [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2003.224560] env[68443]: DEBUG oslo_concurrency.lockutils [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2003.224931] env[68443]: DEBUG oslo_concurrency.lockutils [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2003.225220] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6e9b63bd-e988-411a-963e-6244832a485d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2003.229412] env[68443]: DEBUG oslo_vmware.api [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Waiting for the task: (returnval){ [ 2003.229412] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]525e93ec-acfa-56e2-6c53-8940ab42baa9" [ 2003.229412] env[68443]: _type = "Task" [ 2003.229412] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2003.236987] env[68443]: DEBUG oslo_vmware.api [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]525e93ec-acfa-56e2-6c53-8940ab42baa9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2003.742673] env[68443]: DEBUG oslo_concurrency.lockutils [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2003.742939] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Processing image a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2003.743268] env[68443]: DEBUG oslo_concurrency.lockutils [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2005.825444] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2007.820942] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2048.012188] env[68443]: WARNING oslo_vmware.rw_handles [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2048.012188] env[68443]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2048.012188] env[68443]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2048.012188] env[68443]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2048.012188] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2048.012188] env[68443]: ERROR oslo_vmware.rw_handles response.begin() [ 2048.012188] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2048.012188] env[68443]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2048.012188] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2048.012188] env[68443]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2048.012188] env[68443]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2048.012188] env[68443]: ERROR oslo_vmware.rw_handles [ 2048.012887] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 
tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Downloaded image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to vmware_temp/b56c2e67-4dd5-4535-9076-9030d59e2ecc/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2048.014865] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Caching image {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2048.015191] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Copying Virtual Disk [datastore1] vmware_temp/b56c2e67-4dd5-4535-9076-9030d59e2ecc/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk to [datastore1] vmware_temp/b56c2e67-4dd5-4535-9076-9030d59e2ecc/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk {{(pid=68443) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2048.015469] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-395129ec-96fe-49c2-9cfc-35bb9ef135b3 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.022958] env[68443]: DEBUG oslo_vmware.api [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Waiting for the task: (returnval){ [ 2048.022958] env[68443]: value = "task-3374070" [ 2048.022958] env[68443]: _type = "Task" [ 2048.022958] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2048.030755] env[68443]: DEBUG oslo_vmware.api [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Task: {'id': task-3374070, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2048.532969] env[68443]: DEBUG oslo_vmware.exceptions [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Fault InvalidArgument not matched. 
{{(pid=68443) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2048.533272] env[68443]: DEBUG oslo_concurrency.lockutils [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2048.533807] env[68443]: ERROR nova.compute.manager [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2048.533807] env[68443]: Faults: ['InvalidArgument'] [ 2048.533807] env[68443]: ERROR nova.compute.manager [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Traceback (most recent call last): [ 2048.533807] env[68443]: ERROR nova.compute.manager [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2048.533807] env[68443]: ERROR nova.compute.manager [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] yield resources [ 2048.533807] env[68443]: ERROR nova.compute.manager [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2048.533807] env[68443]: ERROR nova.compute.manager [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] self.driver.spawn(context, instance, image_meta, [ 2048.533807] env[68443]: ERROR nova.compute.manager [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2048.533807] env[68443]: ERROR nova.compute.manager [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2048.533807] env[68443]: ERROR nova.compute.manager [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2048.533807] env[68443]: ERROR nova.compute.manager [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] self._fetch_image_if_missing(context, vi) [ 2048.533807] env[68443]: ERROR nova.compute.manager [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2048.534430] env[68443]: ERROR nova.compute.manager [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] image_cache(vi, tmp_image_ds_loc) [ 2048.534430] env[68443]: ERROR nova.compute.manager [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2048.534430] env[68443]: ERROR nova.compute.manager [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] vm_util.copy_virtual_disk( [ 2048.534430] env[68443]: ERROR nova.compute.manager [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2048.534430] env[68443]: ERROR nova.compute.manager [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] session._wait_for_task(vmdk_copy_task) [ 2048.534430] env[68443]: ERROR nova.compute.manager [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2048.534430] env[68443]: ERROR nova.compute.manager [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] return self.wait_for_task(task_ref) [ 2048.534430] env[68443]: ERROR nova.compute.manager [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2048.534430] env[68443]: ERROR nova.compute.manager [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] return evt.wait() [ 2048.534430] env[68443]: ERROR nova.compute.manager [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2048.534430] env[68443]: ERROR nova.compute.manager [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] result = hub.switch() [ 2048.534430] env[68443]: ERROR nova.compute.manager [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2048.534430] env[68443]: ERROR nova.compute.manager [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] return self.greenlet.switch() [ 2048.534911] env[68443]: ERROR nova.compute.manager [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2048.534911] env[68443]: ERROR nova.compute.manager [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] self.f(*self.args, **self.kw) [ 2048.534911] env[68443]: ERROR nova.compute.manager [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2048.534911] env[68443]: ERROR nova.compute.manager [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] raise exceptions.translate_fault(task_info.error) [ 2048.534911] env[68443]: ERROR nova.compute.manager [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2048.534911] env[68443]: ERROR nova.compute.manager [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Faults: ['InvalidArgument'] [ 2048.534911] env[68443]: ERROR nova.compute.manager [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] [ 2048.534911] env[68443]: INFO nova.compute.manager [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Terminating instance [ 2048.535744] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2048.535964] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2048.536223] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-edf8c0c1-bae8-47f9-92bb-68aaf8661a68 {{(pid=68443) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.538554] env[68443]: DEBUG nova.compute.manager [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2048.538750] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2048.539540] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b90c400-73ba-49b1-bc97-402777dca6f5 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.546279] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Unregistering the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2048.546482] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-219ba755-b2b7-469b-b496-be35fd7ae5d9 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.548531] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2048.548706] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68443) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2048.550048] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-55399863-e4f8-4800-b843-67e97ebea647 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.554514] env[68443]: DEBUG oslo_vmware.api [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Waiting for the task: (returnval){ [ 2048.554514] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]524f6773-22e9-d5ee-0294-6f5a8e0924de" [ 2048.554514] env[68443]: _type = "Task" [ 2048.554514] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2048.561298] env[68443]: DEBUG oslo_vmware.api [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]524f6773-22e9-d5ee-0294-6f5a8e0924de, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2048.619543] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Unregistered the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2048.619752] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Deleting contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2048.619875] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Deleting the datastore file [datastore1] 04fc458a-a928-43ef-8fd0-bfc49989d2b1 {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2048.620161] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e5bfba85-a7af-4323-9a84-8c2004a5949d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.625766] env[68443]: DEBUG oslo_vmware.api [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Waiting for the task: (returnval){ [ 2048.625766] env[68443]: value = "task-3374072" [ 2048.625766] env[68443]: _type = "Task" [ 2048.625766] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2048.633011] env[68443]: DEBUG oslo_vmware.api [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Task: {'id': task-3374072, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2048.928016] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c60d1c60-244b-4aa8-b21a-7026b2ec07e6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Acquiring lock "75ba0bb9-0498-4434-aed1-b03aa0bcaf03" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2049.064445] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Preparing fetch location {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2049.064761] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Creating directory with path [datastore1] vmware_temp/e925dffe-0d1d-4612-9181-d4c4f76b26fb/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2049.064911] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c82ced31-082f-4047-a646-82944d554f86 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.076688] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Created directory with path [datastore1] vmware_temp/e925dffe-0d1d-4612-9181-d4c4f76b26fb/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2049.076867] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Fetch image to [datastore1] vmware_temp/e925dffe-0d1d-4612-9181-d4c4f76b26fb/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2049.077047] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to [datastore1] vmware_temp/e925dffe-0d1d-4612-9181-d4c4f76b26fb/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2049.077862] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eecf785-21f5-40a1-a9b6-bdf109dfd0ad {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.084173] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d13c0479-cb3b-4d43-a2af-a2d05d0114cb {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.093918] env[68443]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b624fa1-4684-42fc-b21d-618b2cea15b9 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.123666] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1fa916f-7609-4579-a253-b22f0c35e1c9 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.134914] env[68443]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-1b703376-26fb-4463-a483-9763f1781dcf {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.136471] env[68443]: DEBUG oslo_vmware.api [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Task: {'id': task-3374072, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077323} completed successfully. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2049.136703] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Deleted the datastore file {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2049.136872] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Deleted contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2049.137055] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2049.137231] env[68443]: INFO nova.compute.manager [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2049.139245] env[68443]: DEBUG nova.compute.claims [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Aborting claim: {{(pid=68443) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2049.139454] env[68443]: DEBUG oslo_concurrency.lockutils [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2049.139679] env[68443]: DEBUG oslo_concurrency.lockutils [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2049.157413] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2049.209588] env[68443]: DEBUG oslo_vmware.rw_handles [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e925dffe-0d1d-4612-9181-d4c4f76b26fb/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68443) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2049.272916] env[68443]: DEBUG oslo_vmware.rw_handles [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Completed reading data from the image iterator. {{(pid=68443) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2049.273147] env[68443]: DEBUG oslo_vmware.rw_handles [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e925dffe-0d1d-4612-9181-d4c4f76b26fb/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68443) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2049.357079] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c8dedd8-4b64-452d-8d65-e742fe178b5c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.364601] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b03b32d2-d55f-4630-a145-8f0518f00d93 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.394234] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1319b81-f30b-4202-a19b-45714b550134 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.400850] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c30ff20d-70ec-45e7-b462-c6bf01e73dc5 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.413473] env[68443]: DEBUG nova.compute.provider_tree [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2049.421849] env[68443]: DEBUG nova.scheduler.client.report [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2049.435330] env[68443]: DEBUG oslo_concurrency.lockutils [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.296s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2049.435837] env[68443]: ERROR nova.compute.manager [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2049.435837] env[68443]: Faults: ['InvalidArgument'] [ 2049.435837] env[68443]: ERROR nova.compute.manager [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Traceback (most recent call last): [ 2049.435837] env[68443]: ERROR nova.compute.manager [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2049.435837] 
env[68443]: ERROR nova.compute.manager [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] self.driver.spawn(context, instance, image_meta, [ 2049.435837] env[68443]: ERROR nova.compute.manager [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2049.435837] env[68443]: ERROR nova.compute.manager [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2049.435837] env[68443]: ERROR nova.compute.manager [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2049.435837] env[68443]: ERROR nova.compute.manager [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] self._fetch_image_if_missing(context, vi) [ 2049.435837] env[68443]: ERROR nova.compute.manager [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2049.435837] env[68443]: ERROR nova.compute.manager [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] image_cache(vi, tmp_image_ds_loc) [ 2049.435837] env[68443]: ERROR nova.compute.manager [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2049.436253] env[68443]: ERROR nova.compute.manager [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] vm_util.copy_virtual_disk( [ 2049.436253] env[68443]: ERROR nova.compute.manager [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2049.436253] env[68443]: ERROR nova.compute.manager [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] session._wait_for_task(vmdk_copy_task) [ 2049.436253] env[68443]: ERROR nova.compute.manager [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2049.436253] env[68443]: ERROR nova.compute.manager [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] return self.wait_for_task(task_ref) [ 2049.436253] env[68443]: ERROR nova.compute.manager [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2049.436253] env[68443]: ERROR nova.compute.manager [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] return evt.wait() [ 2049.436253] env[68443]: ERROR nova.compute.manager [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2049.436253] env[68443]: ERROR nova.compute.manager [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] result = hub.switch() [ 2049.436253] env[68443]: ERROR nova.compute.manager [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2049.436253] env[68443]: ERROR nova.compute.manager [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] return self.greenlet.switch() [ 2049.436253] env[68443]: ERROR nova.compute.manager [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2049.436253] env[68443]: ERROR nova.compute.manager [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] self.f(*self.args, **self.kw) [ 2049.436738] env[68443]: ERROR nova.compute.manager [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2049.436738] env[68443]: ERROR nova.compute.manager [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] raise exceptions.translate_fault(task_info.error) [ 2049.436738] env[68443]: ERROR nova.compute.manager [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2049.436738] env[68443]: ERROR nova.compute.manager [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Faults: ['InvalidArgument'] [ 2049.436738] env[68443]: ERROR nova.compute.manager [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] [ 2049.436738] env[68443]: DEBUG nova.compute.utils [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] VimFaultException {{(pid=68443) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2049.438015] env[68443]: DEBUG nova.compute.manager [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Build of instance 04fc458a-a928-43ef-8fd0-bfc49989d2b1 was re-scheduled: A specified parameter was not correct: fileType [ 2049.438015] env[68443]: Faults: ['InvalidArgument'] {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2049.438394] env[68443]: DEBUG nova.compute.manager [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Unplugging VIFs for instance {{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2049.438564] env[68443]: DEBUG nova.compute.manager [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2049.438735] env[68443]: DEBUG nova.compute.manager [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2049.438897] env[68443]: DEBUG nova.network.neutron [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2049.743183] env[68443]: DEBUG nova.network.neutron [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2049.753876] env[68443]: INFO nova.compute.manager [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Took 0.31 seconds to deallocate network for instance. [ 2049.859028] env[68443]: INFO nova.scheduler.client.report [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Deleted allocations for instance 04fc458a-a928-43ef-8fd0-bfc49989d2b1 [ 2049.881451] env[68443]: DEBUG oslo_concurrency.lockutils [None req-e29cd9c0-9db5-400f-a71a-a83f2b2418f3 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Lock "04fc458a-a928-43ef-8fd0-bfc49989d2b1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 641.039s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2049.882605] env[68443]: DEBUG oslo_concurrency.lockutils [None req-7df12fa1-9508-4c22-a1a6-5a5471d4c643 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Lock "04fc458a-a928-43ef-8fd0-bfc49989d2b1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 444.860s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2049.882933] env[68443]: DEBUG oslo_concurrency.lockutils [None req-7df12fa1-9508-4c22-a1a6-5a5471d4c643 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Acquiring lock "04fc458a-a928-43ef-8fd0-bfc49989d2b1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2049.883076] env[68443]: DEBUG oslo_concurrency.lockutils [None req-7df12fa1-9508-4c22-a1a6-5a5471d4c643 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Lock "04fc458a-a928-43ef-8fd0-bfc49989d2b1-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2049.883213] env[68443]: DEBUG oslo_concurrency.lockutils [None req-7df12fa1-9508-4c22-a1a6-5a5471d4c643 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Lock "04fc458a-a928-43ef-8fd0-bfc49989d2b1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2049.885358] env[68443]: INFO nova.compute.manager [None req-7df12fa1-9508-4c22-a1a6-5a5471d4c643 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Terminating instance [ 2049.887099] env[68443]: DEBUG nova.compute.manager [None req-7df12fa1-9508-4c22-a1a6-5a5471d4c643 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2049.887327] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-7df12fa1-9508-4c22-a1a6-5a5471d4c643 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2049.887775] env[68443]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a1e61d92-92f9-42f5-89ec-116cd73611f3 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.892461] env[68443]: DEBUG nova.compute.manager [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2049.899328] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdf7142f-0867-4b5c-bae4-35a6347e5266 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.929118] env[68443]: WARNING nova.virt.vmwareapi.vmops [None req-7df12fa1-9508-4c22-a1a6-5a5471d4c643 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 04fc458a-a928-43ef-8fd0-bfc49989d2b1 could not be found. 
[ 2049.929336] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-7df12fa1-9508-4c22-a1a6-5a5471d4c643 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2049.929517] env[68443]: INFO nova.compute.manager [None req-7df12fa1-9508-4c22-a1a6-5a5471d4c643 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2049.929754] env[68443]: DEBUG oslo.service.loopingcall [None req-7df12fa1-9508-4c22-a1a6-5a5471d4c643 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2049.931936] env[68443]: DEBUG nova.compute.manager [-] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2049.932061] env[68443]: DEBUG nova.network.neutron [-] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2049.945948] env[68443]: DEBUG oslo_concurrency.lockutils [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2049.946194] env[68443]: DEBUG oslo_concurrency.lockutils [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2049.947601] env[68443]: INFO nova.compute.claims [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2049.958815] env[68443]: DEBUG nova.network.neutron [-] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2049.972122] env[68443]: INFO nova.compute.manager [-] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] Took 0.04 seconds to deallocate network for instance. 
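Just above, the claim for instance da00322f-5482-4511-94a4-2e2f3705fb99 succeeds on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28, and the scheduler report client keeps logging an unchanged inventory for provider feda0f0b-e324-4b78-af74-5e6cfd355a37. As a rough sanity check only (this is not placement code; the helper name is illustrative and it assumes the usual capacity formula (total - reserved) * allocation_ratio, with max_unit capping any single allocation), the sketch below derives the usable capacity implied by that logged inventory.

# Inventory exactly as logged for provider feda0f0b-e324-4b78-af74-5e6cfd355a37.
INVENTORY = {
    'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
             'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105,
                'step_size': 1, 'allocation_ratio': 1.0},
}

def usable_capacity(inventory):
    """Capacity available for allocation: (total - reserved) * allocation_ratio."""
    return {
        rc: int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
        for rc, inv in inventory.items()
    }

print(usable_capacity(INVENTORY))
# -> {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}

Against those figures (192 VCPU, 196078 MB RAM, 400 GB disk), the per-instance m1.nano allocations the resource tracker logs later ({'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1} each) are easily satisfied, which is consistent with the "Claim successful" record above.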
[ 2050.057049] env[68443]: DEBUG oslo_concurrency.lockutils [None req-7df12fa1-9508-4c22-a1a6-5a5471d4c643 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Lock "04fc458a-a928-43ef-8fd0-bfc49989d2b1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.174s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2050.057894] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "04fc458a-a928-43ef-8fd0-bfc49989d2b1" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 155.976s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2050.058097] env[68443]: INFO nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 04fc458a-a928-43ef-8fd0-bfc49989d2b1] During sync_power_state the instance has a pending task (deleting). Skip. [ 2050.058275] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "04fc458a-a928-43ef-8fd0-bfc49989d2b1" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2050.115020] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a3d2984-4ab0-4ee2-b00d-b7c6ba36fa86 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.121897] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32e8c07f-2336-4f93-b3d6-bf859fae66a2 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.151733] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b0f5360-033a-4ecc-81e6-e10b6ea5de81 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.159750] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1ee52c5-c8a3-4c14-83d6-d56bccc4dfaa {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.172921] env[68443]: DEBUG nova.compute.provider_tree [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2050.181634] env[68443]: DEBUG nova.scheduler.client.report [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 
'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2050.194631] env[68443]: DEBUG oslo_concurrency.lockutils [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.248s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2050.195255] env[68443]: DEBUG nova.compute.manager [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Start building networks asynchronously for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2050.225617] env[68443]: DEBUG nova.compute.utils [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Using /dev/sd instead of None {{(pid=68443) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2050.227023] env[68443]: DEBUG nova.compute.manager [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Allocating IP information in the background. {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2050.227174] env[68443]: DEBUG nova.network.neutron [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] allocate_for_instance() {{(pid=68443) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2050.235029] env[68443]: DEBUG nova.compute.manager [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Start building block device mappings for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2050.286296] env[68443]: DEBUG nova.policy [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bd97a934ab8f48e2bf883cc4dddcdde1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3dafe4b3f7d243caa51d39bfc74a4c11', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68443) authorize /opt/stack/nova/nova/policy.py:203}} [ 2050.296296] env[68443]: DEBUG nova.compute.manager [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Start spawning the instance on the hypervisor. 
{{(pid=68443) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2050.322994] env[68443]: DEBUG nova.virt.hardware [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-08T18:48:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-08T18:48:12Z,direct_url=,disk_format='vmdk',id=a80a8b97-4d56-4702-9b02-9d115fcd6710,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f4f2a1e220914ec3b281775c224df247',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-08T18:48:13Z,virtual_size=,visibility=), allow threads: False {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2050.323254] env[68443]: DEBUG nova.virt.hardware [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Flavor limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2050.323413] env[68443]: DEBUG nova.virt.hardware [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Image limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2050.323594] env[68443]: DEBUG nova.virt.hardware [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Flavor pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2050.323741] env[68443]: DEBUG nova.virt.hardware [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Image pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2050.323887] env[68443]: DEBUG nova.virt.hardware [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2050.324158] env[68443]: DEBUG nova.virt.hardware [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2050.324344] env[68443]: DEBUG nova.virt.hardware [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2050.324512] env[68443]: DEBUG nova.virt.hardware [None 
req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Got 1 possible topologies {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2050.324677] env[68443]: DEBUG nova.virt.hardware [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2050.324849] env[68443]: DEBUG nova.virt.hardware [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2050.325736] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90f33996-6ba2-4e64-937c-0459bf918b2a {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.333643] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b2e074e-7d43-4ce7-9802-58d2c140213d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.566166] env[68443]: DEBUG nova.network.neutron [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Successfully created port: fdab4956-1ddd-4a13-a972-b1d1565dc3df {{(pid=68443) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2051.181435] env[68443]: DEBUG nova.network.neutron [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Successfully updated port: fdab4956-1ddd-4a13-a972-b1d1565dc3df {{(pid=68443) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2051.194695] env[68443]: DEBUG oslo_concurrency.lockutils [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Acquiring lock "refresh_cache-da00322f-5482-4511-94a4-2e2f3705fb99" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2051.194841] env[68443]: DEBUG oslo_concurrency.lockutils [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Acquired lock "refresh_cache-da00322f-5482-4511-94a4-2e2f3705fb99" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2051.194981] env[68443]: DEBUG nova.network.neutron [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Building network info cache for instance {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2051.253138] env[68443]: DEBUG nova.network.neutron [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 
tempest-DeleteServersTestJSON-507336290-project-member] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Instance cache missing network info. {{(pid=68443) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2051.435475] env[68443]: DEBUG nova.network.neutron [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Updating instance_info_cache with network_info: [{"id": "fdab4956-1ddd-4a13-a972-b1d1565dc3df", "address": "fa:16:3e:df:fa:67", "network": {"id": "03d4bfbc-f6ba-4fd1-8bf5-dde287b9760f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1378866790-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3dafe4b3f7d243caa51d39bfc74a4c11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "47ca1ce6-8148-48d5-bcfe-89e39b73914e", "external-id": "nsx-vlan-transportzone-259", "segmentation_id": 259, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfdab4956-1d", "ovs_interfaceid": "fdab4956-1ddd-4a13-a972-b1d1565dc3df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2051.446671] env[68443]: DEBUG oslo_concurrency.lockutils [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Releasing lock "refresh_cache-da00322f-5482-4511-94a4-2e2f3705fb99" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2051.447093] env[68443]: DEBUG nova.compute.manager [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Instance network_info: |[{"id": "fdab4956-1ddd-4a13-a972-b1d1565dc3df", "address": "fa:16:3e:df:fa:67", "network": {"id": "03d4bfbc-f6ba-4fd1-8bf5-dde287b9760f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1378866790-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3dafe4b3f7d243caa51d39bfc74a4c11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "47ca1ce6-8148-48d5-bcfe-89e39b73914e", "external-id": "nsx-vlan-transportzone-259", "segmentation_id": 259, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfdab4956-1d", "ovs_interfaceid": "fdab4956-1ddd-4a13-a972-b1d1565dc3df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": 
{}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 2051.447449] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:df:fa:67', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '47ca1ce6-8148-48d5-bcfe-89e39b73914e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fdab4956-1ddd-4a13-a972-b1d1565dc3df', 'vif_model': 'vmxnet3'}] {{(pid=68443) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2051.455671] env[68443]: DEBUG oslo.service.loopingcall [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2051.456243] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Creating VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2051.456501] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-531bc061-0fd5-48a4-beec-d58d6fe5c307 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.479283] env[68443]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2051.479283] env[68443]: value = "task-3374073" [ 2051.479283] env[68443]: _type = "Task" [ 2051.479283] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2051.489864] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3374073, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2051.788007] env[68443]: DEBUG nova.compute.manager [req-0e563faa-3f4c-438a-a95e-de46b34d44c0 req-8ce84fce-79f6-418a-919c-19cc3011a631 service nova] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Received event network-vif-plugged-fdab4956-1ddd-4a13-a972-b1d1565dc3df {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2051.788369] env[68443]: DEBUG oslo_concurrency.lockutils [req-0e563faa-3f4c-438a-a95e-de46b34d44c0 req-8ce84fce-79f6-418a-919c-19cc3011a631 service nova] Acquiring lock "da00322f-5482-4511-94a4-2e2f3705fb99-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2051.788702] env[68443]: DEBUG oslo_concurrency.lockutils [req-0e563faa-3f4c-438a-a95e-de46b34d44c0 req-8ce84fce-79f6-418a-919c-19cc3011a631 service nova] Lock "da00322f-5482-4511-94a4-2e2f3705fb99-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2051.788975] env[68443]: DEBUG oslo_concurrency.lockutils [req-0e563faa-3f4c-438a-a95e-de46b34d44c0 req-8ce84fce-79f6-418a-919c-19cc3011a631 service nova] Lock "da00322f-5482-4511-94a4-2e2f3705fb99-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2051.789270] env[68443]: DEBUG nova.compute.manager [req-0e563faa-3f4c-438a-a95e-de46b34d44c0 req-8ce84fce-79f6-418a-919c-19cc3011a631 service nova] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] No waiting events found dispatching network-vif-plugged-fdab4956-1ddd-4a13-a972-b1d1565dc3df {{(pid=68443) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2051.789557] env[68443]: WARNING nova.compute.manager [req-0e563faa-3f4c-438a-a95e-de46b34d44c0 req-8ce84fce-79f6-418a-919c-19cc3011a631 service nova] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Received unexpected event network-vif-plugged-fdab4956-1ddd-4a13-a972-b1d1565dc3df for instance with vm_state building and task_state spawning. [ 2051.789837] env[68443]: DEBUG nova.compute.manager [req-0e563faa-3f4c-438a-a95e-de46b34d44c0 req-8ce84fce-79f6-418a-919c-19cc3011a631 service nova] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Received event network-changed-fdab4956-1ddd-4a13-a972-b1d1565dc3df {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2051.790116] env[68443]: DEBUG nova.compute.manager [req-0e563faa-3f4c-438a-a95e-de46b34d44c0 req-8ce84fce-79f6-418a-919c-19cc3011a631 service nova] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Refreshing instance network info cache due to event network-changed-fdab4956-1ddd-4a13-a972-b1d1565dc3df. 
{{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 2051.790446] env[68443]: DEBUG oslo_concurrency.lockutils [req-0e563faa-3f4c-438a-a95e-de46b34d44c0 req-8ce84fce-79f6-418a-919c-19cc3011a631 service nova] Acquiring lock "refresh_cache-da00322f-5482-4511-94a4-2e2f3705fb99" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2051.790661] env[68443]: DEBUG oslo_concurrency.lockutils [req-0e563faa-3f4c-438a-a95e-de46b34d44c0 req-8ce84fce-79f6-418a-919c-19cc3011a631 service nova] Acquired lock "refresh_cache-da00322f-5482-4511-94a4-2e2f3705fb99" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2051.790899] env[68443]: DEBUG nova.network.neutron [req-0e563faa-3f4c-438a-a95e-de46b34d44c0 req-8ce84fce-79f6-418a-919c-19cc3011a631 service nova] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Refreshing network info cache for port fdab4956-1ddd-4a13-a972-b1d1565dc3df {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2051.989396] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3374073, 'name': CreateVM_Task, 'duration_secs': 0.327166} completed successfully. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2051.989658] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Created VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2051.990362] env[68443]: DEBUG oslo_concurrency.lockutils [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2051.990531] env[68443]: DEBUG oslo_concurrency.lockutils [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2051.990852] env[68443]: DEBUG oslo_concurrency.lockutils [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2051.991126] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38c8292d-b61b-4c6d-b54e-eb6bd1861af1 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.996320] env[68443]: DEBUG oslo_vmware.api [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Waiting for the task: (returnval){ [ 2051.996320] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]52181a19-765d-0472-0cef-eee96d7be659" [ 2051.996320] env[68443]: _type = "Task" [ 2051.996320] env[68443]: } to complete. 
{{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2052.006873] env[68443]: DEBUG oslo_vmware.api [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]52181a19-765d-0472-0cef-eee96d7be659, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2052.067945] env[68443]: DEBUG nova.network.neutron [req-0e563faa-3f4c-438a-a95e-de46b34d44c0 req-8ce84fce-79f6-418a-919c-19cc3011a631 service nova] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Updated VIF entry in instance network info cache for port fdab4956-1ddd-4a13-a972-b1d1565dc3df. {{(pid=68443) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2052.068342] env[68443]: DEBUG nova.network.neutron [req-0e563faa-3f4c-438a-a95e-de46b34d44c0 req-8ce84fce-79f6-418a-919c-19cc3011a631 service nova] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Updating instance_info_cache with network_info: [{"id": "fdab4956-1ddd-4a13-a972-b1d1565dc3df", "address": "fa:16:3e:df:fa:67", "network": {"id": "03d4bfbc-f6ba-4fd1-8bf5-dde287b9760f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1378866790-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3dafe4b3f7d243caa51d39bfc74a4c11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "47ca1ce6-8148-48d5-bcfe-89e39b73914e", "external-id": "nsx-vlan-transportzone-259", "segmentation_id": 259, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfdab4956-1d", "ovs_interfaceid": "fdab4956-1ddd-4a13-a972-b1d1565dc3df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2052.078865] env[68443]: DEBUG oslo_concurrency.lockutils [req-0e563faa-3f4c-438a-a95e-de46b34d44c0 req-8ce84fce-79f6-418a-919c-19cc3011a631 service nova] Releasing lock "refresh_cache-da00322f-5482-4511-94a4-2e2f3705fb99" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2052.507868] env[68443]: DEBUG oslo_concurrency.lockutils [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2052.508262] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Processing image a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) _fetch_image_if_missing 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2052.508316] env[68443]: DEBUG oslo_concurrency.lockutils [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2053.829499] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2053.829890] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2054.825750] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2054.837369] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2054.837802] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2054.837802] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2054.837899] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68443) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2054.839154] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d80362d-412c-4ab8-bfd7-964426988980 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.849762] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ba0acdf-3221-4f49-91f2-c706572cb8f4 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.863878] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adb7a1fb-b5a8-4917-91a8-c13a8c6961e1 {{(pid=68443) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.870854] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e911d7e-35e1-4bd6-b6b1-d998afdeb79d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.902768] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180953MB free_disk=105GB free_vcpus=48 pci_devices=None {{(pid=68443) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2054.902768] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2054.902768] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2054.994468] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 2985403d-348f-473d-ad1f-75fb67d3be12 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2054.994637] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 0a9bb99d-8f94-4f26-990e-a57aac09c328 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2054.994765] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance b8c2916e-3b70-42c9-9f85-ee8582c636b8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2054.994888] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 062710e8-2ccb-4926-97ce-bf6a9fa4d10c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2054.995017] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 963d7f65-a761-4ce1-b6d1-fc987c3111c0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2054.995146] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 12b39079-051e-4997-9fa1-7e467af04306 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2054.995287] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 6333b256-471f-485d-b099-21fa82349319 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2054.995428] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance a077b8cb-2ae3-4a4b-b309-ca8dbca0e412 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2054.996077] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 75ba0bb9-0498-4434-aed1-b03aa0bcaf03 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2054.996077] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance da00322f-5482-4511-94a4-2e2f3705fb99 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2054.996077] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2054.996077] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2055.118188] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29a5426d-b9c9-4145-9088-b64987c85008 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.125828] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6eded7c-13e4-428e-a6e7-ac35665c75e6 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.155613] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a3f8fb8-e136-4334-b820-460b48cb4066 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.162286] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-162312e2-6195-4fb8-a104-b58e1c5dd6be {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.175337] env[68443]: DEBUG nova.compute.provider_tree [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2055.184390] env[68443]: DEBUG nova.scheduler.client.report [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2055.198191] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68443) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2055.198380] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.297s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2057.197598] env[68443]: DEBUG oslo_service.periodic_task [None 
req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2057.197904] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Starting heal instance info cache {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 2057.197904] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Rebuilding the list of instances to heal {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 2057.219596] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2057.219736] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2057.219869] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2057.219994] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2057.220128] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2057.220250] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2057.220369] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 6333b256-471f-485d-b099-21fa82349319] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2057.220486] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2057.220603] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Skipping network cache update for instance because it is Building. 
{{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2057.220717] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2057.220854] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Didn't find any instances for network info cache update. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 2058.824664] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2058.825026] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2058.825026] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68443) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 2062.821336] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2062.824978] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2067.824793] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2093.348863] env[68443]: DEBUG oslo_concurrency.lockutils [None req-169bbfb3-bbb1-4062-8f9e-a459bd191040 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Acquiring lock "da00322f-5482-4511-94a4-2e2f3705fb99" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2098.353026] env[68443]: WARNING oslo_vmware.rw_handles [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2098.353026] env[68443]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2098.353026] env[68443]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2098.353026] env[68443]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2098.353026] env[68443]: ERROR oslo_vmware.rw_handles File 
"/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2098.353026] env[68443]: ERROR oslo_vmware.rw_handles response.begin() [ 2098.353026] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2098.353026] env[68443]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2098.353026] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2098.353026] env[68443]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2098.353026] env[68443]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2098.353026] env[68443]: ERROR oslo_vmware.rw_handles [ 2098.353747] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Downloaded image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to vmware_temp/e925dffe-0d1d-4612-9181-d4c4f76b26fb/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2098.355889] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Caching image {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2098.356139] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Copying Virtual Disk [datastore1] vmware_temp/e925dffe-0d1d-4612-9181-d4c4f76b26fb/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk to [datastore1] vmware_temp/e925dffe-0d1d-4612-9181-d4c4f76b26fb/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk {{(pid=68443) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2098.356457] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d55ec7dc-49a2-4151-bc40-b2903cb9e7b9 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.365010] env[68443]: DEBUG oslo_vmware.api [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Waiting for the task: (returnval){ [ 2098.365010] env[68443]: value = "task-3374074" [ 2098.365010] env[68443]: _type = "Task" [ 2098.365010] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2098.373360] env[68443]: DEBUG oslo_vmware.api [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Task: {'id': task-3374074, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2098.875248] env[68443]: DEBUG oslo_vmware.exceptions [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Fault InvalidArgument not matched. 
{{(pid=68443) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2098.875565] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2098.876140] env[68443]: ERROR nova.compute.manager [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2098.876140] env[68443]: Faults: ['InvalidArgument'] [ 2098.876140] env[68443]: ERROR nova.compute.manager [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Traceback (most recent call last): [ 2098.876140] env[68443]: ERROR nova.compute.manager [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2098.876140] env[68443]: ERROR nova.compute.manager [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] yield resources [ 2098.876140] env[68443]: ERROR nova.compute.manager [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2098.876140] env[68443]: ERROR nova.compute.manager [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] self.driver.spawn(context, instance, image_meta, [ 2098.876140] env[68443]: ERROR nova.compute.manager [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2098.876140] env[68443]: ERROR nova.compute.manager [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2098.876140] env[68443]: ERROR nova.compute.manager [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2098.876140] env[68443]: ERROR nova.compute.manager [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] self._fetch_image_if_missing(context, vi) [ 2098.876140] env[68443]: ERROR nova.compute.manager [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2098.876140] env[68443]: ERROR nova.compute.manager [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] image_cache(vi, tmp_image_ds_loc) [ 2098.876583] env[68443]: ERROR nova.compute.manager [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2098.876583] env[68443]: ERROR nova.compute.manager [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] vm_util.copy_virtual_disk( [ 2098.876583] env[68443]: ERROR nova.compute.manager [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2098.876583] env[68443]: ERROR nova.compute.manager [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] session._wait_for_task(vmdk_copy_task) [ 2098.876583] env[68443]: ERROR nova.compute.manager [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 2098.876583] env[68443]: ERROR nova.compute.manager [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] return self.wait_for_task(task_ref) [ 2098.876583] env[68443]: ERROR nova.compute.manager [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2098.876583] env[68443]: ERROR nova.compute.manager [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] return evt.wait() [ 2098.876583] env[68443]: ERROR nova.compute.manager [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2098.876583] env[68443]: ERROR nova.compute.manager [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] result = hub.switch() [ 2098.876583] env[68443]: ERROR nova.compute.manager [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2098.876583] env[68443]: ERROR nova.compute.manager [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] return self.greenlet.switch() [ 2098.876583] env[68443]: ERROR nova.compute.manager [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2098.876999] env[68443]: ERROR nova.compute.manager [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] self.f(*self.args, **self.kw) [ 2098.876999] env[68443]: ERROR nova.compute.manager [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2098.876999] env[68443]: ERROR nova.compute.manager [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] raise exceptions.translate_fault(task_info.error) [ 2098.876999] env[68443]: ERROR nova.compute.manager [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2098.876999] env[68443]: ERROR nova.compute.manager [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Faults: ['InvalidArgument'] [ 2098.876999] env[68443]: ERROR nova.compute.manager [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] [ 2098.876999] env[68443]: INFO nova.compute.manager [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Terminating instance [ 2098.879133] env[68443]: DEBUG oslo_concurrency.lockutils [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2098.879392] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2098.880081] env[68443]: DEBUG nova.compute.manager [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Start destroying the 
instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2098.880259] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2098.880487] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-016686c0-078c-4956-bb19-74b119a70529 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.884345] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9783cb4-84f8-46da-848f-7b03925d03a9 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.891482] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Unregistering the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2098.892600] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2bb2cabe-0739-428d-aa40-381bb17602cd {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.894141] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2098.894333] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68443) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2098.894993] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3971120e-1cd1-49f5-bc5e-13bdb274deca {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.900374] env[68443]: DEBUG oslo_vmware.api [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Waiting for the task: (returnval){ [ 2098.900374] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]52d3e39a-8918-bad7-a6ac-528e6133866d" [ 2098.900374] env[68443]: _type = "Task" [ 2098.900374] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2098.910455] env[68443]: DEBUG oslo_vmware.api [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]52d3e39a-8918-bad7-a6ac-528e6133866d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2098.967181] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Unregistered the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2098.967409] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Deleting contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2098.967624] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Deleting the datastore file [datastore1] 2985403d-348f-473d-ad1f-75fb67d3be12 {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2098.967915] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-278b8efd-fd32-4206-8632-ec91219bca74 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.973992] env[68443]: DEBUG oslo_vmware.api [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Waiting for the task: (returnval){ [ 2098.973992] env[68443]: value = "task-3374076" [ 2098.973992] env[68443]: _type = "Task" [ 2098.973992] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2098.981529] env[68443]: DEBUG oslo_vmware.api [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Task: {'id': task-3374076, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2099.410550] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Preparing fetch location {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2099.410963] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Creating directory with path [datastore1] vmware_temp/fd4f943a-3729-436e-a68b-a7b435f4c220/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2099.411049] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-297171d1-8a39-4353-b780-cad05dac66d6 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2099.422492] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Created directory with path [datastore1] vmware_temp/fd4f943a-3729-436e-a68b-a7b435f4c220/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2099.422698] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Fetch image to [datastore1] vmware_temp/fd4f943a-3729-436e-a68b-a7b435f4c220/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2099.422873] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to [datastore1] vmware_temp/fd4f943a-3729-436e-a68b-a7b435f4c220/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2099.423584] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96034cae-143b-41f0-9614-05dbb33c268b {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2099.430104] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a1d0eb8-e9f2-4c3f-a602-ce3a30d750dd {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2099.439047] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6c0f858-7295-40f9-bdfc-8c5feb432851 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2099.468298] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bef35e28-ee70-453c-b478-d1ce8bdd7c93 {{(pid=68443) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2099.473380] env[68443]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-7972edd1-c328-46fb-99c8-9b23a8494485 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2099.481894] env[68443]: DEBUG oslo_vmware.api [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Task: {'id': task-3374076, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.07929} completed successfully. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2099.482133] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Deleted the datastore file {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2099.482312] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Deleted contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2099.482507] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2099.482681] env[68443]: INFO nova.compute.manager [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2099.484819] env[68443]: DEBUG nova.compute.claims [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Aborting claim: {{(pid=68443) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2099.484989] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2099.485222] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2099.497739] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2099.553780] env[68443]: DEBUG oslo_vmware.rw_handles [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/fd4f943a-3729-436e-a68b-a7b435f4c220/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68443) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2099.614402] env[68443]: DEBUG oslo_vmware.rw_handles [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Completed reading data from the image iterator. {{(pid=68443) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2099.614599] env[68443]: DEBUG oslo_vmware.rw_handles [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/fd4f943a-3729-436e-a68b-a7b435f4c220/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68443) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2099.703405] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34896182-2229-4fe3-876d-f507445af3e4 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2099.710448] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-275681b5-1ab7-46f6-93d9-4eef9f41d2d2 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2099.739403] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1f41cb5-2fa4-4933-bb5f-0890c52a0d42 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2099.746227] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59bb54fd-c562-4674-bb1b-2d92bda20129 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2099.759107] env[68443]: DEBUG nova.compute.provider_tree [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2099.767409] env[68443]: DEBUG nova.scheduler.client.report [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2099.780938] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.296s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2099.781465] env[68443]: ERROR nova.compute.manager [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2099.781465] env[68443]: Faults: ['InvalidArgument'] [ 2099.781465] env[68443]: ERROR nova.compute.manager [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Traceback (most recent call last): [ 2099.781465] env[68443]: ERROR nova.compute.manager [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2099.781465] env[68443]: ERROR nova.compute.manager [instance: 
2985403d-348f-473d-ad1f-75fb67d3be12] self.driver.spawn(context, instance, image_meta, [ 2099.781465] env[68443]: ERROR nova.compute.manager [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2099.781465] env[68443]: ERROR nova.compute.manager [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2099.781465] env[68443]: ERROR nova.compute.manager [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2099.781465] env[68443]: ERROR nova.compute.manager [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] self._fetch_image_if_missing(context, vi) [ 2099.781465] env[68443]: ERROR nova.compute.manager [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2099.781465] env[68443]: ERROR nova.compute.manager [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] image_cache(vi, tmp_image_ds_loc) [ 2099.781465] env[68443]: ERROR nova.compute.manager [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2099.781907] env[68443]: ERROR nova.compute.manager [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] vm_util.copy_virtual_disk( [ 2099.781907] env[68443]: ERROR nova.compute.manager [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2099.781907] env[68443]: ERROR nova.compute.manager [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] session._wait_for_task(vmdk_copy_task) [ 2099.781907] env[68443]: ERROR nova.compute.manager [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2099.781907] env[68443]: ERROR nova.compute.manager [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] return self.wait_for_task(task_ref) [ 2099.781907] env[68443]: ERROR nova.compute.manager [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2099.781907] env[68443]: ERROR nova.compute.manager [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] return evt.wait() [ 2099.781907] env[68443]: ERROR nova.compute.manager [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2099.781907] env[68443]: ERROR nova.compute.manager [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] result = hub.switch() [ 2099.781907] env[68443]: ERROR nova.compute.manager [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2099.781907] env[68443]: ERROR nova.compute.manager [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] return self.greenlet.switch() [ 2099.781907] env[68443]: ERROR nova.compute.manager [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2099.781907] env[68443]: ERROR nova.compute.manager [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] self.f(*self.args, **self.kw) [ 2099.782293] env[68443]: ERROR nova.compute.manager [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2099.782293] env[68443]: ERROR nova.compute.manager [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] raise exceptions.translate_fault(task_info.error) [ 2099.782293] env[68443]: ERROR nova.compute.manager [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2099.782293] env[68443]: ERROR nova.compute.manager [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Faults: ['InvalidArgument'] [ 2099.782293] env[68443]: ERROR nova.compute.manager [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] [ 2099.782293] env[68443]: DEBUG nova.compute.utils [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] VimFaultException {{(pid=68443) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2099.783622] env[68443]: DEBUG nova.compute.manager [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Build of instance 2985403d-348f-473d-ad1f-75fb67d3be12 was re-scheduled: A specified parameter was not correct: fileType [ 2099.783622] env[68443]: Faults: ['InvalidArgument'] {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2099.783980] env[68443]: DEBUG nova.compute.manager [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Unplugging VIFs for instance {{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2099.784164] env[68443]: DEBUG nova.compute.manager [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2099.784360] env[68443]: DEBUG nova.compute.manager [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2099.784536] env[68443]: DEBUG nova.network.neutron [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2100.087580] env[68443]: DEBUG nova.network.neutron [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2100.101237] env[68443]: INFO nova.compute.manager [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Took 0.32 seconds to deallocate network for instance. [ 2100.195191] env[68443]: INFO nova.scheduler.client.report [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Deleted allocations for instance 2985403d-348f-473d-ad1f-75fb67d3be12 [ 2100.215706] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c911a644-c2b8-40b3-89dc-0cc1506d6e5b tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Lock "2985403d-348f-473d-ad1f-75fb67d3be12" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 641.872s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2100.216506] env[68443]: DEBUG oslo_concurrency.lockutils [None req-463e2e37-48e0-4094-bbb5-a29bec0c8b95 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Lock "2985403d-348f-473d-ad1f-75fb67d3be12" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 445.015s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2100.216506] env[68443]: DEBUG oslo_concurrency.lockutils [None req-463e2e37-48e0-4094-bbb5-a29bec0c8b95 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Acquiring lock "2985403d-348f-473d-ad1f-75fb67d3be12-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2100.216506] env[68443]: DEBUG oslo_concurrency.lockutils [None req-463e2e37-48e0-4094-bbb5-a29bec0c8b95 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Lock "2985403d-348f-473d-ad1f-75fb67d3be12-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2100.216711] env[68443]: 
DEBUG oslo_concurrency.lockutils [None req-463e2e37-48e0-4094-bbb5-a29bec0c8b95 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Lock "2985403d-348f-473d-ad1f-75fb67d3be12-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2100.218510] env[68443]: INFO nova.compute.manager [None req-463e2e37-48e0-4094-bbb5-a29bec0c8b95 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Terminating instance [ 2100.222204] env[68443]: DEBUG nova.compute.manager [None req-463e2e37-48e0-4094-bbb5-a29bec0c8b95 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2100.222372] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-463e2e37-48e0-4094-bbb5-a29bec0c8b95 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2100.222801] env[68443]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2f60404b-2170-4212-ab7b-482563a6d4bd {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2100.233723] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02bc94bd-a31f-45ae-b33b-0478895f08d4 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2100.263967] env[68443]: WARNING nova.virt.vmwareapi.vmops [None req-463e2e37-48e0-4094-bbb5-a29bec0c8b95 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2985403d-348f-473d-ad1f-75fb67d3be12 could not be found. [ 2100.264201] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-463e2e37-48e0-4094-bbb5-a29bec0c8b95 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2100.264412] env[68443]: INFO nova.compute.manager [None req-463e2e37-48e0-4094-bbb5-a29bec0c8b95 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2100.264664] env[68443]: DEBUG oslo.service.loopingcall [None req-463e2e37-48e0-4094-bbb5-a29bec0c8b95 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2100.264896] env[68443]: DEBUG nova.compute.manager [-] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2100.264992] env[68443]: DEBUG nova.network.neutron [-] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2100.290867] env[68443]: DEBUG nova.network.neutron [-] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2100.299076] env[68443]: INFO nova.compute.manager [-] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] Took 0.03 seconds to deallocate network for instance. [ 2100.381942] env[68443]: DEBUG oslo_concurrency.lockutils [None req-463e2e37-48e0-4094-bbb5-a29bec0c8b95 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Lock "2985403d-348f-473d-ad1f-75fb67d3be12" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.166s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2100.382907] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "2985403d-348f-473d-ad1f-75fb67d3be12" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 206.301s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2100.383109] env[68443]: INFO nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 2985403d-348f-473d-ad1f-75fb67d3be12] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 2100.383287] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "2985403d-348f-473d-ad1f-75fb67d3be12" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2114.827945] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2114.828393] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2114.828393] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2114.840532] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2114.840763] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2114.840915] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2114.841082] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68443) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2114.842259] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d7f90c9-f63c-4f3a-a306-957d60ebae86 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2114.851326] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0aef1b4-e1e1-4664-967d-f56bf070c26d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2114.864916] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af8e7498-1464-4ee5-96dc-64c31b3150fe {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2114.870758] env[68443]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fce80d3-0e12-4fa2-ba0d-6f1ccf465acf {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2114.900063] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180956MB free_disk=105GB free_vcpus=48 pci_devices=None {{(pid=68443) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2114.900227] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2114.900449] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2114.968467] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 0a9bb99d-8f94-4f26-990e-a57aac09c328 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2114.968625] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance b8c2916e-3b70-42c9-9f85-ee8582c636b8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2114.968752] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 062710e8-2ccb-4926-97ce-bf6a9fa4d10c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2114.968876] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 963d7f65-a761-4ce1-b6d1-fc987c3111c0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2114.970039] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 12b39079-051e-4997-9fa1-7e467af04306 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2114.970039] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 6333b256-471f-485d-b099-21fa82349319 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2114.970039] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance a077b8cb-2ae3-4a4b-b309-ca8dbca0e412 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2114.970039] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 75ba0bb9-0498-4434-aed1-b03aa0bcaf03 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2114.970262] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance da00322f-5482-4511-94a4-2e2f3705fb99 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2114.970262] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2114.970262] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2115.084860] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de77b5f0-9d42-45ba-af68-0b258227b62e {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.092319] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f4699db-3919-446e-9697-37ee4997d61c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.123303] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ca40b00-37f1-458f-9830-4142d7d2c25f {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.130554] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d6cdb78-e505-44a2-9c3e-a7d97aa36c50 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.143311] env[68443]: DEBUG nova.compute.provider_tree 
[None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2115.151148] env[68443]: DEBUG nova.scheduler.client.report [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2115.164687] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68443) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2115.164870] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.264s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2117.162366] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2117.162366] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Starting heal instance info cache {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 2117.162932] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Rebuilding the list of instances to heal {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 2117.182010] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2117.182188] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2117.182293] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2117.182409] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Skipping network cache update for instance because it is Building. 
{{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2117.182529] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2117.182649] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 6333b256-471f-485d-b099-21fa82349319] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2117.182766] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2117.182882] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2117.182995] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2117.183125] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Didn't find any instances for network info cache update. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 2118.825630] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2118.825951] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2118.825951] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68443) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 2122.822357] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2124.827156] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2126.560916] env[68443]: DEBUG oslo_concurrency.lockutils [None req-8cd5fb6d-8d8a-42fc-9883-9e0072106262 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Acquiring lock "83198ee3-dbb4-4088-b889-1aa9196f0b92" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2126.561204] env[68443]: DEBUG oslo_concurrency.lockutils [None req-8cd5fb6d-8d8a-42fc-9883-9e0072106262 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Lock "83198ee3-dbb4-4088-b889-1aa9196f0b92" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2126.572765] env[68443]: DEBUG nova.compute.manager [None req-8cd5fb6d-8d8a-42fc-9883-9e0072106262 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 83198ee3-dbb4-4088-b889-1aa9196f0b92] Starting instance... 
{{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2126.620324] env[68443]: DEBUG oslo_concurrency.lockutils [None req-8cd5fb6d-8d8a-42fc-9883-9e0072106262 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2126.620324] env[68443]: DEBUG oslo_concurrency.lockutils [None req-8cd5fb6d-8d8a-42fc-9883-9e0072106262 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2126.621816] env[68443]: INFO nova.compute.claims [None req-8cd5fb6d-8d8a-42fc-9883-9e0072106262 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 83198ee3-dbb4-4088-b889-1aa9196f0b92] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2126.776213] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de31428a-581b-46b4-ac1e-0b8b5352f349 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2126.783692] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec98efba-47c1-4987-9f67-3663524f9bc5 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2126.813691] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a0d6cb4-ab77-40f1-911b-ebc93de72179 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2126.821136] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc6d7406-1048-439f-b301-99faab43e09c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2126.835523] env[68443]: DEBUG nova.compute.provider_tree [None req-8cd5fb6d-8d8a-42fc-9883-9e0072106262 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2126.844940] env[68443]: DEBUG nova.scheduler.client.report [None req-8cd5fb6d-8d8a-42fc-9883-9e0072106262 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2126.858774] env[68443]: DEBUG 
oslo_concurrency.lockutils [None req-8cd5fb6d-8d8a-42fc-9883-9e0072106262 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.238s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2126.859284] env[68443]: DEBUG nova.compute.manager [None req-8cd5fb6d-8d8a-42fc-9883-9e0072106262 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 83198ee3-dbb4-4088-b889-1aa9196f0b92] Start building networks asynchronously for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2126.893654] env[68443]: DEBUG nova.compute.utils [None req-8cd5fb6d-8d8a-42fc-9883-9e0072106262 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Using /dev/sd instead of None {{(pid=68443) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2126.895043] env[68443]: DEBUG nova.compute.manager [None req-8cd5fb6d-8d8a-42fc-9883-9e0072106262 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 83198ee3-dbb4-4088-b889-1aa9196f0b92] Allocating IP information in the background. {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2126.895222] env[68443]: DEBUG nova.network.neutron [None req-8cd5fb6d-8d8a-42fc-9883-9e0072106262 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 83198ee3-dbb4-4088-b889-1aa9196f0b92] allocate_for_instance() {{(pid=68443) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2126.903176] env[68443]: DEBUG nova.compute.manager [None req-8cd5fb6d-8d8a-42fc-9883-9e0072106262 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 83198ee3-dbb4-4088-b889-1aa9196f0b92] Start building block device mappings for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2126.957329] env[68443]: DEBUG nova.policy [None req-8cd5fb6d-8d8a-42fc-9883-9e0072106262 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3c9056fd69304807abfeb2fedc4ae20f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5d0592ea4b3c49698b73391ae2be0ad8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68443) authorize /opt/stack/nova/nova/policy.py:203}} [ 2126.964397] env[68443]: DEBUG nova.compute.manager [None req-8cd5fb6d-8d8a-42fc-9883-9e0072106262 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 83198ee3-dbb4-4088-b889-1aa9196f0b92] Start spawning the instance on the hypervisor. 
{{(pid=68443) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2126.989253] env[68443]: DEBUG nova.virt.hardware [None req-8cd5fb6d-8d8a-42fc-9883-9e0072106262 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-08T18:48:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-08T18:48:12Z,direct_url=,disk_format='vmdk',id=a80a8b97-4d56-4702-9b02-9d115fcd6710,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f4f2a1e220914ec3b281775c224df247',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-08T18:48:13Z,virtual_size=,visibility=), allow threads: False {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2126.989503] env[68443]: DEBUG nova.virt.hardware [None req-8cd5fb6d-8d8a-42fc-9883-9e0072106262 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Flavor limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2126.989664] env[68443]: DEBUG nova.virt.hardware [None req-8cd5fb6d-8d8a-42fc-9883-9e0072106262 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Image limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2126.989842] env[68443]: DEBUG nova.virt.hardware [None req-8cd5fb6d-8d8a-42fc-9883-9e0072106262 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Flavor pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2126.989990] env[68443]: DEBUG nova.virt.hardware [None req-8cd5fb6d-8d8a-42fc-9883-9e0072106262 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Image pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2126.990154] env[68443]: DEBUG nova.virt.hardware [None req-8cd5fb6d-8d8a-42fc-9883-9e0072106262 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2126.990374] env[68443]: DEBUG nova.virt.hardware [None req-8cd5fb6d-8d8a-42fc-9883-9e0072106262 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2126.990534] env[68443]: DEBUG nova.virt.hardware [None req-8cd5fb6d-8d8a-42fc-9883-9e0072106262 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2126.990700] 
env[68443]: DEBUG nova.virt.hardware [None req-8cd5fb6d-8d8a-42fc-9883-9e0072106262 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Got 1 possible topologies {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2126.990860] env[68443]: DEBUG nova.virt.hardware [None req-8cd5fb6d-8d8a-42fc-9883-9e0072106262 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2126.991044] env[68443]: DEBUG nova.virt.hardware [None req-8cd5fb6d-8d8a-42fc-9883-9e0072106262 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2126.991919] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86e2edb5-b18e-41ca-bf07-de9775fb1088 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2126.999363] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96fa0863-0a73-4e53-b075-11417623f23e {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.247306] env[68443]: DEBUG nova.network.neutron [None req-8cd5fb6d-8d8a-42fc-9883-9e0072106262 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 83198ee3-dbb4-4088-b889-1aa9196f0b92] Successfully created port: d508bf11-e8c9-4573-95ac-b1924fe02da2 {{(pid=68443) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2127.803303] env[68443]: DEBUG nova.compute.manager [req-e7c11f7d-01aa-4e80-a623-61d177ab42d2 req-08f38db6-e52e-44a7-85d7-a1a0245cdb84 service nova] [instance: 83198ee3-dbb4-4088-b889-1aa9196f0b92] Received event network-vif-plugged-d508bf11-e8c9-4573-95ac-b1924fe02da2 {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2127.803635] env[68443]: DEBUG oslo_concurrency.lockutils [req-e7c11f7d-01aa-4e80-a623-61d177ab42d2 req-08f38db6-e52e-44a7-85d7-a1a0245cdb84 service nova] Acquiring lock "83198ee3-dbb4-4088-b889-1aa9196f0b92-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2127.803775] env[68443]: DEBUG oslo_concurrency.lockutils [req-e7c11f7d-01aa-4e80-a623-61d177ab42d2 req-08f38db6-e52e-44a7-85d7-a1a0245cdb84 service nova] Lock "83198ee3-dbb4-4088-b889-1aa9196f0b92-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2127.803947] env[68443]: DEBUG oslo_concurrency.lockutils [req-e7c11f7d-01aa-4e80-a623-61d177ab42d2 req-08f38db6-e52e-44a7-85d7-a1a0245cdb84 service nova] Lock "83198ee3-dbb4-4088-b889-1aa9196f0b92-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
2127.804133] env[68443]: DEBUG nova.compute.manager [req-e7c11f7d-01aa-4e80-a623-61d177ab42d2 req-08f38db6-e52e-44a7-85d7-a1a0245cdb84 service nova] [instance: 83198ee3-dbb4-4088-b889-1aa9196f0b92] No waiting events found dispatching network-vif-plugged-d508bf11-e8c9-4573-95ac-b1924fe02da2 {{(pid=68443) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2127.804303] env[68443]: WARNING nova.compute.manager [req-e7c11f7d-01aa-4e80-a623-61d177ab42d2 req-08f38db6-e52e-44a7-85d7-a1a0245cdb84 service nova] [instance: 83198ee3-dbb4-4088-b889-1aa9196f0b92] Received unexpected event network-vif-plugged-d508bf11-e8c9-4573-95ac-b1924fe02da2 for instance with vm_state building and task_state spawning. [ 2127.825355] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2127.827979] env[68443]: DEBUG nova.network.neutron [None req-8cd5fb6d-8d8a-42fc-9883-9e0072106262 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 83198ee3-dbb4-4088-b889-1aa9196f0b92] Successfully updated port: d508bf11-e8c9-4573-95ac-b1924fe02da2 {{(pid=68443) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2127.839618] env[68443]: DEBUG oslo_concurrency.lockutils [None req-8cd5fb6d-8d8a-42fc-9883-9e0072106262 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Acquiring lock "refresh_cache-83198ee3-dbb4-4088-b889-1aa9196f0b92" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2127.839770] env[68443]: DEBUG oslo_concurrency.lockutils [None req-8cd5fb6d-8d8a-42fc-9883-9e0072106262 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Acquired lock "refresh_cache-83198ee3-dbb4-4088-b889-1aa9196f0b92" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2127.839983] env[68443]: DEBUG nova.network.neutron [None req-8cd5fb6d-8d8a-42fc-9883-9e0072106262 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 83198ee3-dbb4-4088-b889-1aa9196f0b92] Building network info cache for instance {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2128.038703] env[68443]: DEBUG nova.network.neutron [None req-8cd5fb6d-8d8a-42fc-9883-9e0072106262 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 83198ee3-dbb4-4088-b889-1aa9196f0b92] Instance cache missing network info. 
{{(pid=68443) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2128.194454] env[68443]: DEBUG nova.network.neutron [None req-8cd5fb6d-8d8a-42fc-9883-9e0072106262 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 83198ee3-dbb4-4088-b889-1aa9196f0b92] Updating instance_info_cache with network_info: [{"id": "d508bf11-e8c9-4573-95ac-b1924fe02da2", "address": "fa:16:3e:11:fc:8f", "network": {"id": "ef236b73-c299-4b78-ba68-0be63802a04b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1138155627-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d0592ea4b3c49698b73391ae2be0ad8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afae6acd-1873-4228-9d5a-1cd5d4efe3e4", "external-id": "nsx-vlan-transportzone-183", "segmentation_id": 183, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd508bf11-e8", "ovs_interfaceid": "d508bf11-e8c9-4573-95ac-b1924fe02da2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2128.208011] env[68443]: DEBUG oslo_concurrency.lockutils [None req-8cd5fb6d-8d8a-42fc-9883-9e0072106262 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Releasing lock "refresh_cache-83198ee3-dbb4-4088-b889-1aa9196f0b92" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2128.208313] env[68443]: DEBUG nova.compute.manager [None req-8cd5fb6d-8d8a-42fc-9883-9e0072106262 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 83198ee3-dbb4-4088-b889-1aa9196f0b92] Instance network_info: |[{"id": "d508bf11-e8c9-4573-95ac-b1924fe02da2", "address": "fa:16:3e:11:fc:8f", "network": {"id": "ef236b73-c299-4b78-ba68-0be63802a04b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1138155627-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d0592ea4b3c49698b73391ae2be0ad8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afae6acd-1873-4228-9d5a-1cd5d4efe3e4", "external-id": "nsx-vlan-transportzone-183", "segmentation_id": 183, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd508bf11-e8", "ovs_interfaceid": "d508bf11-e8c9-4573-95ac-b1924fe02da2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68443) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1987}} [ 2128.208702] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-8cd5fb6d-8d8a-42fc-9883-9e0072106262 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 83198ee3-dbb4-4088-b889-1aa9196f0b92] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:11:fc:8f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'afae6acd-1873-4228-9d5a-1cd5d4efe3e4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd508bf11-e8c9-4573-95ac-b1924fe02da2', 'vif_model': 'vmxnet3'}] {{(pid=68443) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2128.216419] env[68443]: DEBUG oslo.service.loopingcall [None req-8cd5fb6d-8d8a-42fc-9883-9e0072106262 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2128.216859] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 83198ee3-dbb4-4088-b889-1aa9196f0b92] Creating VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2128.217450] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e769907f-d037-401a-8302-3c7c61ee69f3 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.240465] env[68443]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2128.240465] env[68443]: value = "task-3374077" [ 2128.240465] env[68443]: _type = "Task" [ 2128.240465] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2128.247642] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3374077, 'name': CreateVM_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2128.750877] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3374077, 'name': CreateVM_Task, 'duration_secs': 0.309841} completed successfully. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2128.751063] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 83198ee3-dbb4-4088-b889-1aa9196f0b92] Created VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2128.751720] env[68443]: DEBUG oslo_concurrency.lockutils [None req-8cd5fb6d-8d8a-42fc-9883-9e0072106262 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2128.751882] env[68443]: DEBUG oslo_concurrency.lockutils [None req-8cd5fb6d-8d8a-42fc-9883-9e0072106262 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2128.752252] env[68443]: DEBUG oslo_concurrency.lockutils [None req-8cd5fb6d-8d8a-42fc-9883-9e0072106262 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2128.752494] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6935430a-9993-49ee-938c-8173474f8757 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.756619] env[68443]: DEBUG oslo_vmware.api [None req-8cd5fb6d-8d8a-42fc-9883-9e0072106262 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Waiting for the task: (returnval){ [ 2128.756619] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]5283c463-8b4b-98d2-cd69-74889b79e59e" [ 2128.756619] env[68443]: _type = "Task" [ 2128.756619] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2128.767712] env[68443]: DEBUG oslo_vmware.api [None req-8cd5fb6d-8d8a-42fc-9883-9e0072106262 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]5283c463-8b4b-98d2-cd69-74889b79e59e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2129.268055] env[68443]: DEBUG oslo_concurrency.lockutils [None req-8cd5fb6d-8d8a-42fc-9883-9e0072106262 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2129.268463] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-8cd5fb6d-8d8a-42fc-9883-9e0072106262 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 83198ee3-dbb4-4088-b889-1aa9196f0b92] Processing image a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2129.268516] env[68443]: DEBUG oslo_concurrency.lockutils [None req-8cd5fb6d-8d8a-42fc-9883-9e0072106262 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2129.836327] env[68443]: DEBUG nova.compute.manager [req-145d6bfa-4155-4cad-9bc7-67f2619281e4 req-480e918b-c2e0-408f-9c3b-2203c8c77f31 service nova] [instance: 83198ee3-dbb4-4088-b889-1aa9196f0b92] Received event network-changed-d508bf11-e8c9-4573-95ac-b1924fe02da2 {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2129.836327] env[68443]: DEBUG nova.compute.manager [req-145d6bfa-4155-4cad-9bc7-67f2619281e4 req-480e918b-c2e0-408f-9c3b-2203c8c77f31 service nova] [instance: 83198ee3-dbb4-4088-b889-1aa9196f0b92] Refreshing instance network info cache due to event network-changed-d508bf11-e8c9-4573-95ac-b1924fe02da2. {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 2129.836512] env[68443]: DEBUG oslo_concurrency.lockutils [req-145d6bfa-4155-4cad-9bc7-67f2619281e4 req-480e918b-c2e0-408f-9c3b-2203c8c77f31 service nova] Acquiring lock "refresh_cache-83198ee3-dbb4-4088-b889-1aa9196f0b92" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2129.836512] env[68443]: DEBUG oslo_concurrency.lockutils [req-145d6bfa-4155-4cad-9bc7-67f2619281e4 req-480e918b-c2e0-408f-9c3b-2203c8c77f31 service nova] Acquired lock "refresh_cache-83198ee3-dbb4-4088-b889-1aa9196f0b92" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2129.836636] env[68443]: DEBUG nova.network.neutron [req-145d6bfa-4155-4cad-9bc7-67f2619281e4 req-480e918b-c2e0-408f-9c3b-2203c8c77f31 service nova] [instance: 83198ee3-dbb4-4088-b889-1aa9196f0b92] Refreshing network info cache for port d508bf11-e8c9-4573-95ac-b1924fe02da2 {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2130.087189] env[68443]: DEBUG nova.network.neutron [req-145d6bfa-4155-4cad-9bc7-67f2619281e4 req-480e918b-c2e0-408f-9c3b-2203c8c77f31 service nova] [instance: 83198ee3-dbb4-4088-b889-1aa9196f0b92] Updated VIF entry in instance network info cache for port d508bf11-e8c9-4573-95ac-b1924fe02da2. 
{{(pid=68443) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2130.087556] env[68443]: DEBUG nova.network.neutron [req-145d6bfa-4155-4cad-9bc7-67f2619281e4 req-480e918b-c2e0-408f-9c3b-2203c8c77f31 service nova] [instance: 83198ee3-dbb4-4088-b889-1aa9196f0b92] Updating instance_info_cache with network_info: [{"id": "d508bf11-e8c9-4573-95ac-b1924fe02da2", "address": "fa:16:3e:11:fc:8f", "network": {"id": "ef236b73-c299-4b78-ba68-0be63802a04b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1138155627-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5d0592ea4b3c49698b73391ae2be0ad8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afae6acd-1873-4228-9d5a-1cd5d4efe3e4", "external-id": "nsx-vlan-transportzone-183", "segmentation_id": 183, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd508bf11-e8", "ovs_interfaceid": "d508bf11-e8c9-4573-95ac-b1924fe02da2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2130.097200] env[68443]: DEBUG oslo_concurrency.lockutils [req-145d6bfa-4155-4cad-9bc7-67f2619281e4 req-480e918b-c2e0-408f-9c3b-2203c8c77f31 service nova] Releasing lock "refresh_cache-83198ee3-dbb4-4088-b889-1aa9196f0b92" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2131.821055] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2147.623039] env[68443]: WARNING oslo_vmware.rw_handles [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2147.623039] env[68443]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2147.623039] env[68443]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2147.623039] env[68443]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2147.623039] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2147.623039] env[68443]: ERROR oslo_vmware.rw_handles response.begin() [ 2147.623039] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2147.623039] env[68443]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2147.623039] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2147.623039] env[68443]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed 
connection without" [ 2147.623039] env[68443]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2147.623039] env[68443]: ERROR oslo_vmware.rw_handles [ 2147.623039] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Downloaded image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to vmware_temp/fd4f943a-3729-436e-a68b-a7b435f4c220/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2147.625373] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Caching image {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2147.625607] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Copying Virtual Disk [datastore1] vmware_temp/fd4f943a-3729-436e-a68b-a7b435f4c220/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk to [datastore1] vmware_temp/fd4f943a-3729-436e-a68b-a7b435f4c220/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk {{(pid=68443) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2147.625888] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8b42920c-b4ee-42f3-8129-b96224ccf5c2 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2147.635498] env[68443]: DEBUG oslo_vmware.api [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Waiting for the task: (returnval){ [ 2147.635498] env[68443]: value = "task-3374078" [ 2147.635498] env[68443]: _type = "Task" [ 2147.635498] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2147.643364] env[68443]: DEBUG oslo_vmware.api [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Task: {'id': task-3374078, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2148.145316] env[68443]: DEBUG oslo_vmware.exceptions [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Fault InvalidArgument not matched. 
{{(pid=68443) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2148.145603] env[68443]: DEBUG oslo_concurrency.lockutils [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2148.146164] env[68443]: ERROR nova.compute.manager [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2148.146164] env[68443]: Faults: ['InvalidArgument'] [ 2148.146164] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Traceback (most recent call last): [ 2148.146164] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2148.146164] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] yield resources [ 2148.146164] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2148.146164] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] self.driver.spawn(context, instance, image_meta, [ 2148.146164] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2148.146164] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2148.146164] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2148.146164] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] self._fetch_image_if_missing(context, vi) [ 2148.146164] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2148.146629] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] image_cache(vi, tmp_image_ds_loc) [ 2148.146629] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2148.146629] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] vm_util.copy_virtual_disk( [ 2148.146629] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2148.146629] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] session._wait_for_task(vmdk_copy_task) [ 2148.146629] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, 
in _wait_for_task [ 2148.146629] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] return self.wait_for_task(task_ref) [ 2148.146629] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2148.146629] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] return evt.wait() [ 2148.146629] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2148.146629] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] result = hub.switch() [ 2148.146629] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2148.146629] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] return self.greenlet.switch() [ 2148.147072] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2148.147072] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] self.f(*self.args, **self.kw) [ 2148.147072] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2148.147072] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] raise exceptions.translate_fault(task_info.error) [ 2148.147072] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2148.147072] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Faults: ['InvalidArgument'] [ 2148.147072] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] [ 2148.147072] env[68443]: INFO nova.compute.manager [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Terminating instance [ 2148.148034] env[68443]: DEBUG oslo_concurrency.lockutils [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2148.148255] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2148.148495] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e9ffbf16-c3ee-43a4-b280-1b9cffe6b4c8 {{(pid=68443) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.150594] env[68443]: DEBUG oslo_concurrency.lockutils [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Acquiring lock "refresh_cache-0a9bb99d-8f94-4f26-990e-a57aac09c328" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2148.150756] env[68443]: DEBUG oslo_concurrency.lockutils [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Acquired lock "refresh_cache-0a9bb99d-8f94-4f26-990e-a57aac09c328" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2148.150957] env[68443]: DEBUG nova.network.neutron [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Building network info cache for instance {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2148.157676] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2148.157801] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68443) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2148.159072] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-96209212-60c4-4eed-87c4-ce603c5a57b3 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.166349] env[68443]: DEBUG oslo_vmware.api [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Waiting for the task: (returnval){ [ 2148.166349] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]52085707-994b-2465-32aa-294530cea52a" [ 2148.166349] env[68443]: _type = "Task" [ 2148.166349] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2148.173906] env[68443]: DEBUG oslo_vmware.api [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]52085707-994b-2465-32aa-294530cea52a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2148.179942] env[68443]: DEBUG nova.network.neutron [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Instance cache missing network info. 
{{(pid=68443) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2148.241607] env[68443]: DEBUG nova.network.neutron [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2148.254043] env[68443]: DEBUG oslo_concurrency.lockutils [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Releasing lock "refresh_cache-0a9bb99d-8f94-4f26-990e-a57aac09c328" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2148.254471] env[68443]: DEBUG nova.compute.manager [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2148.254665] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2148.255775] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4aa4a31-f0c3-4cd3-9084-b894f87f11c2 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.263374] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Unregistering the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2148.263602] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-aa63c785-cb74-4479-a8f2-b84423f7acb3 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.296468] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Unregistered the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2148.296697] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Deleting contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2148.296866] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Deleting the datastore file [datastore1] 0a9bb99d-8f94-4f26-990e-a57aac09c328 {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2148.297130] env[68443]: 
DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-aa08954d-af0f-4426-97d6-cde383008020 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.302643] env[68443]: DEBUG oslo_vmware.api [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Waiting for the task: (returnval){ [ 2148.302643] env[68443]: value = "task-3374080" [ 2148.302643] env[68443]: _type = "Task" [ 2148.302643] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2148.310848] env[68443]: DEBUG oslo_vmware.api [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Task: {'id': task-3374080, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2148.677226] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Preparing fetch location {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2148.677574] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Creating directory with path [datastore1] vmware_temp/39261284-a098-4f05-8545-1eb9ff196ff3/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2148.677738] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-affb02a9-b032-43fa-8a18-9ef2bc8b3288 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.688694] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Created directory with path [datastore1] vmware_temp/39261284-a098-4f05-8545-1eb9ff196ff3/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2148.688879] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Fetch image to [datastore1] vmware_temp/39261284-a098-4f05-8545-1eb9ff196ff3/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2148.689055] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to [datastore1] vmware_temp/39261284-a098-4f05-8545-1eb9ff196ff3/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) _fetch_image_as_file 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2148.689793] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de249449-1d47-4fb6-ac20-89c6abf6862e {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.696326] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-555a7efb-2f5c-4058-a620-1ce2ff4a4165 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.704973] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a76f9f7-437b-42e9-b9dd-70e6e1f86c07 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.734574] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30109478-028a-4ef8-af28-9fb0034fda99 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.739655] env[68443]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-51906aaa-1ab3-4a7d-9436-772be64666d1 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.760126] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2148.811963] env[68443]: DEBUG oslo_vmware.api [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Task: {'id': task-3374080, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.042367} completed successfully. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2148.812251] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Deleted the datastore file {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2148.812453] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Deleted contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2148.812645] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2148.812835] env[68443]: INFO nova.compute.manager [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Took 0.56 seconds to destroy the instance on the hypervisor. [ 2148.813149] env[68443]: DEBUG oslo.service.loopingcall [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2148.813411] env[68443]: DEBUG nova.compute.manager [-] [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Skipping network deallocation for instance since networking was not requested.
{{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 2148.815666] env[68443]: DEBUG nova.compute.claims [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Aborting claim: {{(pid=68443) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2148.815836] env[68443]: DEBUG oslo_concurrency.lockutils [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2148.816060] env[68443]: DEBUG oslo_concurrency.lockutils [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2148.922036] env[68443]: DEBUG oslo_vmware.rw_handles [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/39261284-a098-4f05-8545-1eb9ff196ff3/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68443) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2148.984724] env[68443]: DEBUG oslo_vmware.rw_handles [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Completed reading data from the image iterator. {{(pid=68443) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2148.985401] env[68443]: DEBUG oslo_vmware.rw_handles [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/39261284-a098-4f05-8545-1eb9ff196ff3/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68443) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2149.033150] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69a5948f-a021-4e77-ae3c-ce862d1c4773 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2149.040192] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c46439f5-2a3d-4b3c-af43-1aa6e77fa43e {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2149.070376] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cd19201-66fd-4768-9308-815bc6147d8d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2149.076883] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ddb3baa-a4f6-465b-86b1-f7951de6dfe1 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2149.089244] env[68443]: DEBUG nova.compute.provider_tree [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2149.097659] env[68443]: DEBUG nova.scheduler.client.report [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2149.111525] env[68443]: DEBUG oslo_concurrency.lockutils [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.295s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2149.112037] env[68443]: ERROR nova.compute.manager [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2149.112037] env[68443]: Faults: ['InvalidArgument'] [ 2149.112037] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Traceback (most recent call last): [ 2149.112037] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2149.112037] env[68443]: ERROR nova.compute.manager [instance: 
0a9bb99d-8f94-4f26-990e-a57aac09c328] self.driver.spawn(context, instance, image_meta, [ 2149.112037] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2149.112037] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2149.112037] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2149.112037] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] self._fetch_image_if_missing(context, vi) [ 2149.112037] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2149.112037] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] image_cache(vi, tmp_image_ds_loc) [ 2149.112037] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2149.112415] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] vm_util.copy_virtual_disk( [ 2149.112415] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2149.112415] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] session._wait_for_task(vmdk_copy_task) [ 2149.112415] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2149.112415] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] return self.wait_for_task(task_ref) [ 2149.112415] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2149.112415] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] return evt.wait() [ 2149.112415] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2149.112415] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] result = hub.switch() [ 2149.112415] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2149.112415] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] return self.greenlet.switch() [ 2149.112415] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2149.112415] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] self.f(*self.args, **self.kw) [ 2149.112772] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2149.112772] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] raise exceptions.translate_fault(task_info.error) [ 2149.112772] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2149.112772] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Faults: ['InvalidArgument'] [ 2149.112772] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] [ 2149.112772] env[68443]: DEBUG nova.compute.utils [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] VimFaultException {{(pid=68443) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2149.114134] env[68443]: DEBUG nova.compute.manager [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Build of instance 0a9bb99d-8f94-4f26-990e-a57aac09c328 was re-scheduled: A specified parameter was not correct: fileType [ 2149.114134] env[68443]: Faults: ['InvalidArgument'] {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2149.114507] env[68443]: DEBUG nova.compute.manager [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Unplugging VIFs for instance {{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2149.114723] env[68443]: DEBUG oslo_concurrency.lockutils [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Acquiring lock "refresh_cache-0a9bb99d-8f94-4f26-990e-a57aac09c328" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2149.114868] env[68443]: DEBUG oslo_concurrency.lockutils [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Acquired lock "refresh_cache-0a9bb99d-8f94-4f26-990e-a57aac09c328" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2149.115062] env[68443]: DEBUG nova.network.neutron [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Building network info cache for instance {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2149.138156] env[68443]: DEBUG nova.network.neutron [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Instance cache missing network info. 
{{(pid=68443) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2149.195835] env[68443]: DEBUG nova.network.neutron [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2149.205767] env[68443]: DEBUG oslo_concurrency.lockutils [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Releasing lock "refresh_cache-0a9bb99d-8f94-4f26-990e-a57aac09c328" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2149.205971] env[68443]: DEBUG nova.compute.manager [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2149.206173] env[68443]: DEBUG nova.compute.manager [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Skipping network deallocation for instance since networking was not requested. {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 2149.296725] env[68443]: INFO nova.scheduler.client.report [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Deleted allocations for instance 0a9bb99d-8f94-4f26-990e-a57aac09c328 [ 2149.316358] env[68443]: DEBUG oslo_concurrency.lockutils [None req-cf5b25b0-d846-4c73-8f4f-afc05cfd8263 tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Lock "0a9bb99d-8f94-4f26-990e-a57aac09c328" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 613.945s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2149.316607] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c67e550c-0161-486c-952c-cb969b4391ab tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Lock "0a9bb99d-8f94-4f26-990e-a57aac09c328" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 417.065s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2149.316828] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c67e550c-0161-486c-952c-cb969b4391ab tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Acquiring lock "0a9bb99d-8f94-4f26-990e-a57aac09c328-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2149.317052] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c67e550c-0161-486c-952c-cb969b4391ab tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Lock "0a9bb99d-8f94-4f26-990e-a57aac09c328-events" acquired by
"nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2149.317225] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c67e550c-0161-486c-952c-cb969b4391ab tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Lock "0a9bb99d-8f94-4f26-990e-a57aac09c328-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2149.319451] env[68443]: INFO nova.compute.manager [None req-c67e550c-0161-486c-952c-cb969b4391ab tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Terminating instance [ 2149.321161] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c67e550c-0161-486c-952c-cb969b4391ab tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Acquiring lock "refresh_cache-0a9bb99d-8f94-4f26-990e-a57aac09c328" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2149.321241] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c67e550c-0161-486c-952c-cb969b4391ab tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Acquired lock "refresh_cache-0a9bb99d-8f94-4f26-990e-a57aac09c328" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2149.321378] env[68443]: DEBUG nova.network.neutron [None req-c67e550c-0161-486c-952c-cb969b4391ab tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Building network info cache for instance {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2149.344984] env[68443]: DEBUG nova.network.neutron [None req-c67e550c-0161-486c-952c-cb969b4391ab tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Instance cache missing network info. {{(pid=68443) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2149.424817] env[68443]: DEBUG nova.network.neutron [None req-c67e550c-0161-486c-952c-cb969b4391ab tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2149.424817] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c67e550c-0161-486c-952c-cb969b4391ab tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Releasing lock "refresh_cache-0a9bb99d-8f94-4f26-990e-a57aac09c328" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2149.424817] env[68443]: DEBUG nova.compute.manager [None req-c67e550c-0161-486c-952c-cb969b4391ab tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Start destroying the instance on the hypervisor.
{{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2149.424817] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-c67e550c-0161-486c-952c-cb969b4391ab tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2149.424817] env[68443]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f52f0202-17cb-46c9-add1-371a250f7dc5 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2149.430378] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12e2acd2-ac2c-4f81-8bb8-4d5a490cd4c8 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2149.460181] env[68443]: WARNING nova.virt.vmwareapi.vmops [None req-c67e550c-0161-486c-952c-cb969b4391ab tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0a9bb99d-8f94-4f26-990e-a57aac09c328 could not be found. [ 2149.460426] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-c67e550c-0161-486c-952c-cb969b4391ab tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2149.460658] env[68443]: INFO nova.compute.manager [None req-c67e550c-0161-486c-952c-cb969b4391ab tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2149.460966] env[68443]: DEBUG oslo.service.loopingcall [None req-c67e550c-0161-486c-952c-cb969b4391ab tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2149.461233] env[68443]: DEBUG nova.compute.manager [-] [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2149.461359] env[68443]: DEBUG nova.network.neutron [-] [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2149.560975] env[68443]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=68443) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 2149.561190] env[68443]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2149.561693] env[68443]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.<locals>._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2149.561693] env[68443]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 2149.561693] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2149.561693] env[68443]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2149.561693] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2149.561693] env[68443]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 2149.561693] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2149.561693] env[68443]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 2149.561693] env[68443]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2149.561693] env[68443]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-e45b0959-3e03-45d5-b32d-1ef6c8826004'] [ 2149.561693] env[68443]: ERROR oslo.service.loopingcall [ 2149.561693] env[68443]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 2149.561693] env[68443]: ERROR oslo.service.loopingcall [ 2149.561693] env[68443]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 2149.561693] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2149.561693] env[68443]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 2149.562178] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2149.562178] env[68443]: ERROR oslo.service.loopingcall result = f(*args,
**kwargs) [ 2149.562178] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 2149.562178] env[68443]: ERROR oslo.service.loopingcall self._deallocate_network( [ 2149.562178] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 2149.562178] env[68443]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 2149.562178] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2149.562178] env[68443]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 2149.562178] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2149.562178] env[68443]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2149.562178] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2149.562178] env[68443]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 2149.562178] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2149.562178] env[68443]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2149.562178] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2149.562178] env[68443]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 2149.562178] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2149.562178] env[68443]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 2149.562788] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2149.562788] env[68443]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2149.562788] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2149.562788] env[68443]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 2149.562788] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2149.562788] env[68443]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2149.562788] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2149.562788] env[68443]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 2149.562788] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2149.562788] env[68443]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2149.562788] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2149.562788] env[68443]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 2149.562788] env[68443]: ERROR oslo.service.loopingcall File 
"/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2149.562788] env[68443]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2149.562788] env[68443]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2149.562788] env[68443]: ERROR oslo.service.loopingcall [ 2149.563125] env[68443]: ERROR nova.compute.manager [None req-c67e550c-0161-486c-952c-cb969b4391ab tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2149.589085] env[68443]: ERROR nova.compute.manager [None req-c67e550c-0161-486c-952c-cb969b4391ab tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2149.589085] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Traceback (most recent call last): [ 2149.589085] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2149.589085] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] ret = obj(*args, **kwargs) [ 2149.589085] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2149.589085] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] exception_handler_v20(status_code, error_body) [ 2149.589085] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2149.589085] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] raise client_exc(message=error_message, [ 2149.589085] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2149.589085] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Neutron server returns request_ids: ['req-e45b0959-3e03-45d5-b32d-1ef6c8826004'] [ 2149.589085] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] [ 2149.589353] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] During handling of the above exception, another exception occurred: [ 2149.589353] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] [ 2149.589353] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Traceback (most recent call last): [ 2149.589353] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] File 
"/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 2149.589353] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] self._delete_instance(context, instance, bdms) [ 2149.589353] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] File "/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 2149.589353] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] self._shutdown_instance(context, instance, bdms) [ 2149.589353] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 2149.589353] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] self._try_deallocate_network(context, instance, requested_networks) [ 2149.589353] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 2149.589353] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] with excutils.save_and_reraise_exception(): [ 2149.589353] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2149.589353] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] self.force_reraise() [ 2149.589584] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2149.589584] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] raise self.value [ 2149.589584] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 2149.589584] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] _deallocate_network_with_retries() [ 2149.589584] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 2149.589584] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] return evt.wait() [ 2149.589584] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2149.589584] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] result = hub.switch() [ 2149.589584] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2149.589584] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] return self.greenlet.switch() [ 2149.589584] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2149.589584] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] result = 
func(*self.args, **self.kw) [ 2149.589792] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2149.589792] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] result = f(*args, **kwargs) [ 2149.589792] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 2149.589792] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] self._deallocate_network( [ 2149.589792] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 2149.589792] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] self.network_api.deallocate_for_instance( [ 2149.589792] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2149.589792] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] data = neutron.list_ports(**search_opts) [ 2149.589792] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2149.589792] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] ret = obj(*args, **kwargs) [ 2149.589792] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2149.589792] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] return self.list('ports', self.ports_path, retrieve_all, [ 2149.589792] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2149.590034] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] ret = obj(*args, **kwargs) [ 2149.590034] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2149.590034] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] for r in self._pagination(collection, path, **params): [ 2149.590034] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2149.590034] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] res = self.get(path, params=params) [ 2149.590034] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2149.590034] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] ret = obj(*args, **kwargs) [ 2149.590034] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 
356, in get [ 2149.590034] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] return self.retry_request("GET", action, body=body, [ 2149.590034] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2149.590034] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] ret = obj(*args, **kwargs) [ 2149.590034] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2149.590034] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] return self.do_request(method, action, body=body, [ 2149.590648] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2149.590648] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] ret = obj(*args, **kwargs) [ 2149.590648] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2149.590648] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] self._handle_fault_response(status_code, replybody, resp) [ 2149.590648] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2149.590648] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2149.590648] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2149.590648] env[68443]: ERROR nova.compute.manager [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] [ 2149.616316] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c67e550c-0161-486c-952c-cb969b4391ab tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Lock "0a9bb99d-8f94-4f26-990e-a57aac09c328" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.299s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2149.616624] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "0a9bb99d-8f94-4f26-990e-a57aac09c328" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 255.535s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2149.616812] env[68443]: INFO nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] During sync_power_state the instance has a pending task (deleting). Skip.
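Annotation: the traceback above captures this run's failure mode. Every neutronclient call made by nova.network.neutron goes through a wrapper (the repeated "neutron.py, line 196, in wrapper" frames); when the client raises Unauthorized (HTTP 401) while listing the instance's ports, the frame at neutron.py line 212 re-raises it as NeutronAdminCredentialConfigurationInvalid, the looping call around the network deallocation fails with that error, and the instance is left in ERROR with its task state reverted, as the records that follow show. Below is a minimal, self-contained Python sketch of that translation pattern only; Unauthorized, AdminCredentialInvalid, FakeNeutronClient and translate_unauthorized are illustrative stand-ins, not the real Nova or neutronclient classes.

# Illustrative sketch only -- it mirrors the wrapper pattern visible in the
# traceback above; none of these names are the real Nova/neutronclient API.
import functools


class Unauthorized(Exception):
    """Stand-in for neutronclient.common.exceptions.Unauthorized (HTTP 401)."""


class AdminCredentialInvalid(Exception):
    """Stand-in for nova.exception.NeutronAdminCredentialConfigurationInvalid."""


class FakeNeutronClient:
    """Pretend client whose admin token is always rejected, as in this log."""

    def list_ports(self, **search_opts):
        raise Unauthorized("The request you have made requires authentication.")


def translate_unauthorized(func):
    """Re-raise a 401 as a configuration error, roughly what the wrapper
    frames at nova/network/neutron.py:196/212 in the traceback are doing."""
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Unauthorized:
            raise AdminCredentialInvalid(
                "Networking client is experiencing an unauthorized exception.")
    return wrapper


def deallocate_for_instance(client, instance_uuid):
    # Deallocation starts by listing the instance's ports, so with a bad
    # admin token the very first call fails and nothing gets cleaned up.
    list_ports = translate_unauthorized(client.list_ports)
    return list_ports(device_id=instance_uuid)


if __name__ == "__main__":
    try:
        deallocate_for_instance(FakeNeutronClient(),
                                "0a9bb99d-8f94-4f26-990e-a57aac09c328")
    except AdminCredentialInvalid as exc:
        print("network deallocation failed:", exc)

The same exception chain is logged three times in this run (by the looping call, by the compute manager when it sets the instance's vm_state to ERROR, and by the oslo_messaging RPC server), which is why the Unauthorized traceback repeats in the surrounding records.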
[ 2149.617021] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "0a9bb99d-8f94-4f26-990e-a57aac09c328" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2149.669093] env[68443]: INFO nova.compute.manager [None req-c67e550c-0161-486c-952c-cb969b4391ab tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] [instance: 0a9bb99d-8f94-4f26-990e-a57aac09c328] Successfully reverted task state from None on failure for instance. [ 2149.672394] env[68443]: ERROR oslo_messaging.rpc.server [None req-c67e550c-0161-486c-952c-cb969b4391ab tempest-ServerShowV254Test-318671727 tempest-ServerShowV254Test-318671727-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2149.672394] env[68443]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 2149.672394] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2149.672394] env[68443]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2149.672394] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2149.672394] env[68443]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 2149.672394] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2149.672394] env[68443]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 2149.672394] env[68443]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2149.672394] env[68443]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-e45b0959-3e03-45d5-b32d-1ef6c8826004'] [ 2149.672394] env[68443]: ERROR oslo_messaging.rpc.server [ 2149.672394] env[68443]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 2149.672394] env[68443]: ERROR oslo_messaging.rpc.server [ 2149.672394] env[68443]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 2149.672394] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 2149.672394] env[68443]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 2149.673121] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 2149.673121] env[68443]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 2149.673121] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 2149.673121] env[68443]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 2149.673121] env[68443]: ERROR oslo_messaging.rpc.server File
"/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 2149.673121] env[68443]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2149.673121] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2149.673121] env[68443]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2149.673121] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2149.673121] env[68443]: ERROR oslo_messaging.rpc.server raise self.value [ 2149.673121] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 2149.673121] env[68443]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 2149.673121] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 2149.673121] env[68443]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2149.673121] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2149.673121] env[68443]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2149.673121] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2149.673121] env[68443]: ERROR oslo_messaging.rpc.server raise self.value [ 2149.673719] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 2149.673719] env[68443]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2149.673719] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 2149.673719] env[68443]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2149.673719] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 2149.673719] env[68443]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2149.673719] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2149.673719] env[68443]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2149.673719] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2149.673719] env[68443]: ERROR oslo_messaging.rpc.server raise self.value [ 2149.673719] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 2149.673719] env[68443]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2149.673719] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3344, in terminate_instance [ 2149.673719] env[68443]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 2149.673719] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 2149.673719] env[68443]: ERROR oslo_messaging.rpc.server return f(*args, 
**kwargs) [ 2149.673719] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3339, in do_terminate_instance [ 2149.673719] env[68443]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2149.674096] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2149.674096] env[68443]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2149.674096] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2149.674096] env[68443]: ERROR oslo_messaging.rpc.server raise self.value [ 2149.674096] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 2149.674096] env[68443]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 2149.674096] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 2149.674096] env[68443]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 2149.674096] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 2149.674096] env[68443]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 2149.674096] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 2149.674096] env[68443]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2149.674096] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2149.674096] env[68443]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2149.674096] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2149.674096] env[68443]: ERROR oslo_messaging.rpc.server raise self.value [ 2149.674096] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 2149.674096] env[68443]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 2149.674665] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 2149.674665] env[68443]: ERROR oslo_messaging.rpc.server return evt.wait() [ 2149.674665] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2149.674665] env[68443]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 2149.674665] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2149.674665] env[68443]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 2149.674665] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2149.674665] env[68443]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 2149.674665] env[68443]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2149.674665] env[68443]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 2149.674665] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 2149.674665] env[68443]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 2149.674665] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 2149.674665] env[68443]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 2149.674665] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2149.674665] env[68443]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 2149.674665] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2149.674665] env[68443]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2149.675243] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2149.675243] env[68443]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 2149.675243] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2149.675243] env[68443]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2149.675243] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2149.675243] env[68443]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 2149.675243] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2149.675243] env[68443]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 2149.675243] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2149.675243] env[68443]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2149.675243] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2149.675243] env[68443]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 2149.675243] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2149.675243] env[68443]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2149.675243] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2149.675243] env[68443]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 2149.675243] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2149.675243] env[68443]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2149.675997] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2149.675997] 
env[68443]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 2149.675997] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2149.675997] env[68443]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2149.675997] env[68443]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2149.675997] env[68443]: ERROR oslo_messaging.rpc.server [ 2174.826605] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2175.825360] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2175.837897] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2175.838184] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2175.838308] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2175.838464] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68443) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2175.839590] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f25d8bae-e80f-415b-ae9e-190e37d63328 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2175.848314] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6350e20-9fe5-446a-adc9-5ede0c8bc18f {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2175.861866] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66c96ec6-5ed7-459a-8e2c-e8a05d9e2e89 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2175.867754] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bb7c834-a13d-4812-9ee8-821066665d21 {{(pid=68443) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2175.898110] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180937MB free_disk=105GB free_vcpus=48 pci_devices=None {{(pid=68443) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2175.898269] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2175.898456] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2176.006031] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance b8c2916e-3b70-42c9-9f85-ee8582c636b8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2176.006141] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 062710e8-2ccb-4926-97ce-bf6a9fa4d10c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2176.006215] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 963d7f65-a761-4ce1-b6d1-fc987c3111c0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2176.006338] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 12b39079-051e-4997-9fa1-7e467af04306 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2176.006462] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 6333b256-471f-485d-b099-21fa82349319 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2176.006583] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance a077b8cb-2ae3-4a4b-b309-ca8dbca0e412 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2176.006698] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 75ba0bb9-0498-4434-aed1-b03aa0bcaf03 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2176.006816] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance da00322f-5482-4511-94a4-2e2f3705fb99 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2176.006932] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 83198ee3-dbb4-4088-b889-1aa9196f0b92 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2176.007145] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2176.007290] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2176.023689] env[68443]: DEBUG nova.scheduler.client.report [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Refreshing inventories for resource provider feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2176.042403] env[68443]: DEBUG nova.scheduler.client.report [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Updating ProviderTree inventory for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2176.042581] env[68443]: DEBUG nova.compute.provider_tree [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Updating inventory in ProviderTree for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2176.052652] env[68443]: DEBUG nova.scheduler.client.report [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Refreshing aggregate associations for resource provider feda0f0b-e324-4b78-af74-5e6cfd355a37, aggregates: None {{(pid=68443) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2176.075088] env[68443]: DEBUG nova.scheduler.client.report [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Refreshing trait associations for resource provider feda0f0b-e324-4b78-af74-5e6cfd355a37, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=68443) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2176.184166] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ca7f7da-607a-489d-8224-4f8af8a3d12a {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.191761] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de9f98e5-ac27-47f7-83d6-3d78c28e135b {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.221295] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0429260c-97f3-4d6a-9546-e18fe4609964 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.228730] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05e93a75-465f-49b0-8841-10d8eb90b583 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.241440] env[68443]: DEBUG nova.compute.provider_tree [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2176.250183] env[68443]: DEBUG nova.scheduler.client.report [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2176.263670] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68443) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2176.263840] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.365s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
2177.263928] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2177.825707] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2177.825884] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Starting heal instance info cache {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 2177.826024] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Rebuilding the list of instances to heal {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 2177.845792] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2177.846011] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2177.846205] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2177.846384] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2177.846560] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 6333b256-471f-485d-b099-21fa82349319] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2177.846728] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2177.846893] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2177.847086] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Skipping network cache update for instance because it is Building. 
{{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2177.847259] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 83198ee3-dbb4-4088-b889-1aa9196f0b92] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2177.847428] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Didn't find any instances for network info cache update. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 2178.825491] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2178.825775] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68443) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 2180.826310] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2181.825399] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2181.825579] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Cleaning up deleted instances {{(pid=68443) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11219}} [ 2181.834992] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] There are 0 instances to clean {{(pid=68443) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 2184.830708] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2186.824717] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2187.825575] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2187.825878] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2189.834597] env[68443]: DEBUG oslo_service.periodic_task [None 
req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2189.834960] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Cleaning up deleted instances with incomplete migration {{(pid=68443) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11257}} [ 2194.154376] env[68443]: WARNING oslo_vmware.rw_handles [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2194.154376] env[68443]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2194.154376] env[68443]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2194.154376] env[68443]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2194.154376] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2194.154376] env[68443]: ERROR oslo_vmware.rw_handles response.begin() [ 2194.154376] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2194.154376] env[68443]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2194.154376] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2194.154376] env[68443]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2194.154376] env[68443]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2194.154376] env[68443]: ERROR oslo_vmware.rw_handles [ 2194.155040] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Downloaded image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to vmware_temp/39261284-a098-4f05-8545-1eb9ff196ff3/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2194.156905] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Caching image {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2194.157159] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Copying Virtual Disk [datastore1] vmware_temp/39261284-a098-4f05-8545-1eb9ff196ff3/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk to [datastore1] vmware_temp/39261284-a098-4f05-8545-1eb9ff196ff3/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk {{(pid=68443) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2194.157441] env[68443]: DEBUG oslo_vmware.service [-] 
Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-182cc463-f1b7-459b-a09b-e1acb8d546fb {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2194.164573] env[68443]: DEBUG oslo_vmware.api [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Waiting for the task: (returnval){ [ 2194.164573] env[68443]: value = "task-3374081" [ 2194.164573] env[68443]: _type = "Task" [ 2194.164573] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2194.172305] env[68443]: DEBUG oslo_vmware.api [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Task: {'id': task-3374081, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2194.674883] env[68443]: DEBUG oslo_vmware.exceptions [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Fault InvalidArgument not matched. {{(pid=68443) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2194.675174] env[68443]: DEBUG oslo_concurrency.lockutils [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2194.675725] env[68443]: ERROR nova.compute.manager [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2194.675725] env[68443]: Faults: ['InvalidArgument'] [ 2194.675725] env[68443]: ERROR nova.compute.manager [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Traceback (most recent call last): [ 2194.675725] env[68443]: ERROR nova.compute.manager [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2194.675725] env[68443]: ERROR nova.compute.manager [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] yield resources [ 2194.675725] env[68443]: ERROR nova.compute.manager [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2194.675725] env[68443]: ERROR nova.compute.manager [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] self.driver.spawn(context, instance, image_meta, [ 2194.675725] env[68443]: ERROR nova.compute.manager [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2194.675725] env[68443]: ERROR nova.compute.manager [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2194.675725] env[68443]: ERROR nova.compute.manager [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2194.675725] env[68443]: ERROR nova.compute.manager [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] self._fetch_image_if_missing(context, vi) [ 2194.675725] env[68443]: ERROR nova.compute.manager [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2194.676072] env[68443]: ERROR nova.compute.manager [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] image_cache(vi, tmp_image_ds_loc) [ 2194.676072] env[68443]: ERROR nova.compute.manager [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2194.676072] env[68443]: ERROR nova.compute.manager [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] vm_util.copy_virtual_disk( [ 2194.676072] env[68443]: ERROR nova.compute.manager [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2194.676072] env[68443]: ERROR nova.compute.manager [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] session._wait_for_task(vmdk_copy_task) [ 2194.676072] env[68443]: ERROR nova.compute.manager [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2194.676072] env[68443]: ERROR nova.compute.manager [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] return self.wait_for_task(task_ref) [ 2194.676072] env[68443]: ERROR nova.compute.manager [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2194.676072] env[68443]: ERROR nova.compute.manager [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] return evt.wait() [ 2194.676072] env[68443]: ERROR nova.compute.manager [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2194.676072] env[68443]: ERROR nova.compute.manager [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] result = hub.switch() [ 2194.676072] env[68443]: ERROR nova.compute.manager [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2194.676072] env[68443]: ERROR nova.compute.manager [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] return self.greenlet.switch() [ 2194.676452] env[68443]: ERROR nova.compute.manager [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2194.676452] env[68443]: ERROR nova.compute.manager [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] self.f(*self.args, **self.kw) [ 2194.676452] env[68443]: ERROR nova.compute.manager [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2194.676452] env[68443]: ERROR nova.compute.manager [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] raise exceptions.translate_fault(task_info.error) [ 2194.676452] env[68443]: ERROR nova.compute.manager [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2194.676452] env[68443]: ERROR nova.compute.manager [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Faults: ['InvalidArgument'] [ 2194.676452] 
env[68443]: ERROR nova.compute.manager [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] [ 2194.676452] env[68443]: INFO nova.compute.manager [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Terminating instance [ 2194.677604] env[68443]: DEBUG oslo_concurrency.lockutils [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2194.677812] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2194.678064] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-14250ac6-f41d-4ffb-9de9-05d1f1045e26 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2194.680335] env[68443]: DEBUG nova.compute.manager [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2194.680530] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2194.681255] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07c8cc6a-8d13-4d6f-ab42-8c0548310086 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2194.688153] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Unregistering the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2194.688365] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7e92c30a-7092-4b7b-88f5-67b1cfd8e52d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2194.690504] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2194.690675] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 
tempest-ServerShowV247Test-182818576-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68443) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2194.691773] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6a359505-9f2b-4b5b-bccc-a9fa78439870 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2194.696215] env[68443]: DEBUG oslo_vmware.api [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Waiting for the task: (returnval){ [ 2194.696215] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]52e4b033-a8a9-e35f-959a-62d42312e7bf" [ 2194.696215] env[68443]: _type = "Task" [ 2194.696215] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2194.703538] env[68443]: DEBUG oslo_vmware.api [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]52e4b033-a8a9-e35f-959a-62d42312e7bf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2194.761643] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Unregistered the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2194.761890] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Deleting contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2194.762084] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Deleting the datastore file [datastore1] b8c2916e-3b70-42c9-9f85-ee8582c636b8 {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2194.762367] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-737667f7-bc13-4c15-8807-9173804cfd7e {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2194.768089] env[68443]: DEBUG oslo_vmware.api [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Waiting for the task: (returnval){ [ 2194.768089] env[68443]: value = "task-3374083" [ 2194.768089] env[68443]: _type = "Task" [ 2194.768089] env[68443]: } to complete. 
{{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2194.775521] env[68443]: DEBUG oslo_vmware.api [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Task: {'id': task-3374083, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2195.209082] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Preparing fetch location {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2195.209082] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Creating directory with path [datastore1] vmware_temp/5aad63f8-b384-4aba-bc4e-1c3b47f9f2c1/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2195.209082] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e03635d0-e252-4699-a9c8-34b94bf0e860 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.223050] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Created directory with path [datastore1] vmware_temp/5aad63f8-b384-4aba-bc4e-1c3b47f9f2c1/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2195.223050] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Fetch image to [datastore1] vmware_temp/5aad63f8-b384-4aba-bc4e-1c3b47f9f2c1/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2195.223050] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to [datastore1] vmware_temp/5aad63f8-b384-4aba-bc4e-1c3b47f9f2c1/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2195.223050] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecaef020-16e4-4c03-9417-b7ac95d5e7d3 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.229300] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d4eb70b-d01a-4ac1-8df0-385feba8ab98 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.239023] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-8138200a-131e-408d-a110-e7f4f2dd7efd {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.273395] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf7e5be8-5496-4a91-b76a-b8fd0b275330 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.280127] env[68443]: DEBUG oslo_vmware.api [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Task: {'id': task-3374083, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.076836} completed successfully. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2195.281672] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Deleted the datastore file {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2195.282035] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Deleted contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2195.282714] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2195.282714] env[68443]: INFO nova.compute.manager [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2195.284641] env[68443]: DEBUG nova.compute.claims [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Aborting claim: {{(pid=68443) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2195.284813] env[68443]: DEBUG oslo_concurrency.lockutils [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2195.285040] env[68443]: DEBUG oslo_concurrency.lockutils [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2195.287545] env[68443]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-669fc56c-5c77-4cec-b412-a1ea5fb56daa {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.309598] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2195.372534] env[68443]: DEBUG oslo_vmware.rw_handles [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5aad63f8-b384-4aba-bc4e-1c3b47f9f2c1/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68443) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2195.436027] env[68443]: DEBUG oslo_vmware.rw_handles [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Completed reading data from the image iterator. {{(pid=68443) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2195.436274] env[68443]: DEBUG oslo_vmware.rw_handles [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5aad63f8-b384-4aba-bc4e-1c3b47f9f2c1/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68443) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2195.497831] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1570eac-de5d-47f0-a2c4-23fa211631e7 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.505043] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd0c696c-fbae-40fe-845f-bc58a54e45bf {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.535308] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf196d8d-299e-464e-b9af-01444b97357a {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.542148] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00eb4aa8-1f37-4072-9367-da4d78a1008c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.554928] env[68443]: DEBUG nova.compute.provider_tree [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2195.564686] env[68443]: DEBUG nova.scheduler.client.report [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2195.579259] env[68443]: DEBUG oslo_concurrency.lockutils [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.294s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2195.579859] env[68443]: ERROR nova.compute.manager [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2195.579859] env[68443]: Faults: ['InvalidArgument'] [ 2195.579859] env[68443]: ERROR nova.compute.manager [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Traceback (most recent call last): [ 2195.579859] env[68443]: ERROR nova.compute.manager [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2195.579859] 
env[68443]: ERROR nova.compute.manager [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] self.driver.spawn(context, instance, image_meta, [ 2195.579859] env[68443]: ERROR nova.compute.manager [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2195.579859] env[68443]: ERROR nova.compute.manager [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2195.579859] env[68443]: ERROR nova.compute.manager [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2195.579859] env[68443]: ERROR nova.compute.manager [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] self._fetch_image_if_missing(context, vi) [ 2195.579859] env[68443]: ERROR nova.compute.manager [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2195.579859] env[68443]: ERROR nova.compute.manager [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] image_cache(vi, tmp_image_ds_loc) [ 2195.579859] env[68443]: ERROR nova.compute.manager [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2195.580205] env[68443]: ERROR nova.compute.manager [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] vm_util.copy_virtual_disk( [ 2195.580205] env[68443]: ERROR nova.compute.manager [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2195.580205] env[68443]: ERROR nova.compute.manager [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] session._wait_for_task(vmdk_copy_task) [ 2195.580205] env[68443]: ERROR nova.compute.manager [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2195.580205] env[68443]: ERROR nova.compute.manager [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] return self.wait_for_task(task_ref) [ 2195.580205] env[68443]: ERROR nova.compute.manager [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2195.580205] env[68443]: ERROR nova.compute.manager [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] return evt.wait() [ 2195.580205] env[68443]: ERROR nova.compute.manager [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2195.580205] env[68443]: ERROR nova.compute.manager [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] result = hub.switch() [ 2195.580205] env[68443]: ERROR nova.compute.manager [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2195.580205] env[68443]: ERROR nova.compute.manager [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] return self.greenlet.switch() [ 2195.580205] env[68443]: ERROR nova.compute.manager [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2195.580205] env[68443]: ERROR nova.compute.manager [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] self.f(*self.args, **self.kw) [ 2195.580474] env[68443]: ERROR nova.compute.manager [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2195.580474] env[68443]: ERROR nova.compute.manager [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] raise exceptions.translate_fault(task_info.error) [ 2195.580474] env[68443]: ERROR nova.compute.manager [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2195.580474] env[68443]: ERROR nova.compute.manager [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Faults: ['InvalidArgument'] [ 2195.580474] env[68443]: ERROR nova.compute.manager [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] [ 2195.580836] env[68443]: DEBUG nova.compute.utils [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] VimFaultException {{(pid=68443) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2195.582321] env[68443]: DEBUG nova.compute.manager [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Build of instance b8c2916e-3b70-42c9-9f85-ee8582c636b8 was re-scheduled: A specified parameter was not correct: fileType [ 2195.582321] env[68443]: Faults: ['InvalidArgument'] {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2195.582689] env[68443]: DEBUG nova.compute.manager [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Unplugging VIFs for instance {{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2195.582862] env[68443]: DEBUG nova.compute.manager [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2195.583070] env[68443]: DEBUG nova.compute.manager [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2195.583253] env[68443]: DEBUG nova.network.neutron [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2195.890145] env[68443]: DEBUG nova.network.neutron [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2195.914421] env[68443]: INFO nova.compute.manager [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Took 0.33 seconds to deallocate network for instance. [ 2196.029453] env[68443]: INFO nova.scheduler.client.report [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Deleted allocations for instance b8c2916e-3b70-42c9-9f85-ee8582c636b8 [ 2196.063339] env[68443]: DEBUG oslo_concurrency.lockutils [None req-01346795-cda2-42e3-b399-a9b988b74560 tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Lock "b8c2916e-3b70-42c9-9f85-ee8582c636b8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 652.062s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2196.063649] env[68443]: DEBUG oslo_concurrency.lockutils [None req-b6faa615-3e84-4881-89dd-201c2d347d9a tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Lock "b8c2916e-3b70-42c9-9f85-ee8582c636b8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 456.616s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2196.063921] env[68443]: DEBUG oslo_concurrency.lockutils [None req-b6faa615-3e84-4881-89dd-201c2d347d9a tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Acquiring lock "b8c2916e-3b70-42c9-9f85-ee8582c636b8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2196.064165] env[68443]: DEBUG oslo_concurrency.lockutils [None req-b6faa615-3e84-4881-89dd-201c2d347d9a tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Lock "b8c2916e-3b70-42c9-9f85-ee8582c636b8-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2196.064400] env[68443]: DEBUG oslo_concurrency.lockutils [None req-b6faa615-3e84-4881-89dd-201c2d347d9a tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Lock "b8c2916e-3b70-42c9-9f85-ee8582c636b8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2196.068502] env[68443]: INFO nova.compute.manager [None req-b6faa615-3e84-4881-89dd-201c2d347d9a tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Terminating instance [ 2196.069506] env[68443]: DEBUG nova.compute.manager [None req-b6faa615-3e84-4881-89dd-201c2d347d9a tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2196.069717] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-b6faa615-3e84-4881-89dd-201c2d347d9a tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2196.069989] env[68443]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-54e5d55d-a44d-466e-8106-907ff91b73f0 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2196.080985] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e433f58-5d65-413f-abae-bf41f3a156ec {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2196.121246] env[68443]: WARNING nova.virt.vmwareapi.vmops [None req-b6faa615-3e84-4881-89dd-201c2d347d9a tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b8c2916e-3b70-42c9-9f85-ee8582c636b8 could not be found. [ 2196.121246] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-b6faa615-3e84-4881-89dd-201c2d347d9a tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2196.121246] env[68443]: INFO nova.compute.manager [None req-b6faa615-3e84-4881-89dd-201c2d347d9a tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Took 0.05 seconds to destroy the instance on the hypervisor. 
[ 2196.121246] env[68443]: DEBUG oslo.service.loopingcall [None req-b6faa615-3e84-4881-89dd-201c2d347d9a tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2196.121246] env[68443]: DEBUG nova.compute.manager [-] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2196.121758] env[68443]: DEBUG nova.network.neutron [-] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2196.152173] env[68443]: DEBUG nova.network.neutron [-] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2196.160962] env[68443]: INFO nova.compute.manager [-] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] Took 0.04 seconds to deallocate network for instance. [ 2196.285128] env[68443]: DEBUG oslo_concurrency.lockutils [None req-b6faa615-3e84-4881-89dd-201c2d347d9a tempest-VolumesAdminNegativeTest-1014115721 tempest-VolumesAdminNegativeTest-1014115721-project-member] Lock "b8c2916e-3b70-42c9-9f85-ee8582c636b8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.220s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2196.285128] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "b8c2916e-3b70-42c9-9f85-ee8582c636b8" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 302.202s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2196.285128] env[68443]: INFO nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: b8c2916e-3b70-42c9-9f85-ee8582c636b8] During sync_power_state the instance has a pending task (deleting). Skip. 
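The "Waiting for function … _deallocate_network_with_retries to return" entry above comes from an oslo.service looping call (loopingcall.py:435). A minimal sketch of that primitive, using FixedIntervalLoopingCall rather than the exact retry variant Nova wires in here; the body and interval are placeholders:

from oslo_service import loopingcall

attempts = {'count': 0}

def _deallocate_with_retries():
    # Placeholder work: pretend deallocation succeeds on the third attempt.
    attempts['count'] += 1
    if attempts['count'] >= 3:
        # Raising LoopingCallDone stops the loop; its retvalue becomes the
        # result of .wait() below.
        raise loopingcall.LoopingCallDone(retvalue=True)

timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
succeeded = timer.start(interval=1.0).wait()  # blocks until the loop finishes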
[ 2196.285128] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "b8c2916e-3b70-42c9-9f85-ee8582c636b8" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2200.319736] env[68443]: DEBUG oslo_concurrency.lockutils [None req-524a989f-9ccf-4947-8c47-163ec6fa483a tempest-ServersNegativeTestJSON-608941495 tempest-ServersNegativeTestJSON-608941495-project-member] Acquiring lock "434a8f2e-7c08-4b16-b255-45b168679f49" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2200.319996] env[68443]: DEBUG oslo_concurrency.lockutils [None req-524a989f-9ccf-4947-8c47-163ec6fa483a tempest-ServersNegativeTestJSON-608941495 tempest-ServersNegativeTestJSON-608941495-project-member] Lock "434a8f2e-7c08-4b16-b255-45b168679f49" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2200.332651] env[68443]: DEBUG nova.compute.manager [None req-524a989f-9ccf-4947-8c47-163ec6fa483a tempest-ServersNegativeTestJSON-608941495 tempest-ServersNegativeTestJSON-608941495-project-member] [instance: 434a8f2e-7c08-4b16-b255-45b168679f49] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2200.400127] env[68443]: DEBUG oslo_concurrency.lockutils [None req-524a989f-9ccf-4947-8c47-163ec6fa483a tempest-ServersNegativeTestJSON-608941495 tempest-ServersNegativeTestJSON-608941495-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2200.400403] env[68443]: DEBUG oslo_concurrency.lockutils [None req-524a989f-9ccf-4947-8c47-163ec6fa483a tempest-ServersNegativeTestJSON-608941495 tempest-ServersNegativeTestJSON-608941495-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2200.401862] env[68443]: INFO nova.compute.claims [None req-524a989f-9ccf-4947-8c47-163ec6fa483a tempest-ServersNegativeTestJSON-608941495 tempest-ServersNegativeTestJSON-608941495-project-member] [instance: 434a8f2e-7c08-4b16-b255-45b168679f49] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2200.582170] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4343a8a-fb66-440f-a37e-62befda2a3d3 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2200.590109] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d776a37-f0e2-4a3b-8909-18042b271093 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2200.619665] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-e89431a2-0fb7-432b-9fa6-508812a29425 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2200.628554] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0558fed-a26d-4f35-8827-ea8a1a6c6731 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2200.645704] env[68443]: DEBUG nova.compute.provider_tree [None req-524a989f-9ccf-4947-8c47-163ec6fa483a tempest-ServersNegativeTestJSON-608941495 tempest-ServersNegativeTestJSON-608941495-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2200.654753] env[68443]: DEBUG nova.scheduler.client.report [None req-524a989f-9ccf-4947-8c47-163ec6fa483a tempest-ServersNegativeTestJSON-608941495 tempest-ServersNegativeTestJSON-608941495-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2200.673813] env[68443]: DEBUG oslo_concurrency.lockutils [None req-524a989f-9ccf-4947-8c47-163ec6fa483a tempest-ServersNegativeTestJSON-608941495 tempest-ServersNegativeTestJSON-608941495-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.273s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2200.674477] env[68443]: DEBUG nova.compute.manager [None req-524a989f-9ccf-4947-8c47-163ec6fa483a tempest-ServersNegativeTestJSON-608941495 tempest-ServersNegativeTestJSON-608941495-project-member] [instance: 434a8f2e-7c08-4b16-b255-45b168679f49] Start building networks asynchronously for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2200.715714] env[68443]: DEBUG nova.compute.utils [None req-524a989f-9ccf-4947-8c47-163ec6fa483a tempest-ServersNegativeTestJSON-608941495 tempest-ServersNegativeTestJSON-608941495-project-member] Using /dev/sd instead of None {{(pid=68443) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2200.716968] env[68443]: DEBUG nova.compute.manager [None req-524a989f-9ccf-4947-8c47-163ec6fa483a tempest-ServersNegativeTestJSON-608941495 tempest-ServersNegativeTestJSON-608941495-project-member] [instance: 434a8f2e-7c08-4b16-b255-45b168679f49] Allocating IP information in the background. 
{{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2200.717207] env[68443]: DEBUG nova.network.neutron [None req-524a989f-9ccf-4947-8c47-163ec6fa483a tempest-ServersNegativeTestJSON-608941495 tempest-ServersNegativeTestJSON-608941495-project-member] [instance: 434a8f2e-7c08-4b16-b255-45b168679f49] allocate_for_instance() {{(pid=68443) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2200.728549] env[68443]: DEBUG nova.compute.manager [None req-524a989f-9ccf-4947-8c47-163ec6fa483a tempest-ServersNegativeTestJSON-608941495 tempest-ServersNegativeTestJSON-608941495-project-member] [instance: 434a8f2e-7c08-4b16-b255-45b168679f49] Start building block device mappings for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2200.771653] env[68443]: DEBUG nova.policy [None req-524a989f-9ccf-4947-8c47-163ec6fa483a tempest-ServersNegativeTestJSON-608941495 tempest-ServersNegativeTestJSON-608941495-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2c7b3ce346324502beae66a4cab6a9f0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '09247f20104e446784da3e31b3117a0b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68443) authorize /opt/stack/nova/nova/policy.py:203}} [ 2200.789912] env[68443]: DEBUG nova.compute.manager [None req-524a989f-9ccf-4947-8c47-163ec6fa483a tempest-ServersNegativeTestJSON-608941495 tempest-ServersNegativeTestJSON-608941495-project-member] [instance: 434a8f2e-7c08-4b16-b255-45b168679f49] Start spawning the instance on the hypervisor. 
{{(pid=68443) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2200.820219] env[68443]: DEBUG nova.virt.hardware [None req-524a989f-9ccf-4947-8c47-163ec6fa483a tempest-ServersNegativeTestJSON-608941495 tempest-ServersNegativeTestJSON-608941495-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-08T18:48:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-08T18:48:12Z,direct_url=,disk_format='vmdk',id=a80a8b97-4d56-4702-9b02-9d115fcd6710,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f4f2a1e220914ec3b281775c224df247',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-08T18:48:13Z,virtual_size=,visibility=), allow threads: False {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2200.820472] env[68443]: DEBUG nova.virt.hardware [None req-524a989f-9ccf-4947-8c47-163ec6fa483a tempest-ServersNegativeTestJSON-608941495 tempest-ServersNegativeTestJSON-608941495-project-member] Flavor limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2200.820630] env[68443]: DEBUG nova.virt.hardware [None req-524a989f-9ccf-4947-8c47-163ec6fa483a tempest-ServersNegativeTestJSON-608941495 tempest-ServersNegativeTestJSON-608941495-project-member] Image limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2200.820811] env[68443]: DEBUG nova.virt.hardware [None req-524a989f-9ccf-4947-8c47-163ec6fa483a tempest-ServersNegativeTestJSON-608941495 tempest-ServersNegativeTestJSON-608941495-project-member] Flavor pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2200.820956] env[68443]: DEBUG nova.virt.hardware [None req-524a989f-9ccf-4947-8c47-163ec6fa483a tempest-ServersNegativeTestJSON-608941495 tempest-ServersNegativeTestJSON-608941495-project-member] Image pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2200.821430] env[68443]: DEBUG nova.virt.hardware [None req-524a989f-9ccf-4947-8c47-163ec6fa483a tempest-ServersNegativeTestJSON-608941495 tempest-ServersNegativeTestJSON-608941495-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2200.821673] env[68443]: DEBUG nova.virt.hardware [None req-524a989f-9ccf-4947-8c47-163ec6fa483a tempest-ServersNegativeTestJSON-608941495 tempest-ServersNegativeTestJSON-608941495-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2200.821841] env[68443]: DEBUG nova.virt.hardware [None req-524a989f-9ccf-4947-8c47-163ec6fa483a tempest-ServersNegativeTestJSON-608941495 tempest-ServersNegativeTestJSON-608941495-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2200.822229] env[68443]: DEBUG 
nova.virt.hardware [None req-524a989f-9ccf-4947-8c47-163ec6fa483a tempest-ServersNegativeTestJSON-608941495 tempest-ServersNegativeTestJSON-608941495-project-member] Got 1 possible topologies {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2200.822433] env[68443]: DEBUG nova.virt.hardware [None req-524a989f-9ccf-4947-8c47-163ec6fa483a tempest-ServersNegativeTestJSON-608941495 tempest-ServersNegativeTestJSON-608941495-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2200.822624] env[68443]: DEBUG nova.virt.hardware [None req-524a989f-9ccf-4947-8c47-163ec6fa483a tempest-ServersNegativeTestJSON-608941495 tempest-ServersNegativeTestJSON-608941495-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2200.823966] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0a81e63-68e1-4fb4-b18c-5eb70b6f3e0f {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2200.832444] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fff50fb-9b8b-473b-b277-09df2a317c9d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2201.082978] env[68443]: DEBUG nova.network.neutron [None req-524a989f-9ccf-4947-8c47-163ec6fa483a tempest-ServersNegativeTestJSON-608941495 tempest-ServersNegativeTestJSON-608941495-project-member] [instance: 434a8f2e-7c08-4b16-b255-45b168679f49] Successfully created port: 113411ee-a92b-4dc0-81a6-6bf425a9f7e9 {{(pid=68443) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2201.839557] env[68443]: DEBUG nova.compute.manager [req-0695c84a-e0e8-4c21-bb1f-8d0f6541a07c req-59fdd736-457a-41ea-a2fd-82697fa98953 service nova] [instance: 434a8f2e-7c08-4b16-b255-45b168679f49] Received event network-vif-plugged-113411ee-a92b-4dc0-81a6-6bf425a9f7e9 {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2201.839804] env[68443]: DEBUG oslo_concurrency.lockutils [req-0695c84a-e0e8-4c21-bb1f-8d0f6541a07c req-59fdd736-457a-41ea-a2fd-82697fa98953 service nova] Acquiring lock "434a8f2e-7c08-4b16-b255-45b168679f49-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2201.840032] env[68443]: DEBUG oslo_concurrency.lockutils [req-0695c84a-e0e8-4c21-bb1f-8d0f6541a07c req-59fdd736-457a-41ea-a2fd-82697fa98953 service nova] Lock "434a8f2e-7c08-4b16-b255-45b168679f49-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2201.840216] env[68443]: DEBUG oslo_concurrency.lockutils [req-0695c84a-e0e8-4c21-bb1f-8d0f6541a07c req-59fdd736-457a-41ea-a2fd-82697fa98953 service nova] Lock "434a8f2e-7c08-4b16-b255-45b168679f49-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2201.840383] env[68443]: DEBUG 
nova.compute.manager [req-0695c84a-e0e8-4c21-bb1f-8d0f6541a07c req-59fdd736-457a-41ea-a2fd-82697fa98953 service nova] [instance: 434a8f2e-7c08-4b16-b255-45b168679f49] No waiting events found dispatching network-vif-plugged-113411ee-a92b-4dc0-81a6-6bf425a9f7e9 {{(pid=68443) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2201.840548] env[68443]: WARNING nova.compute.manager [req-0695c84a-e0e8-4c21-bb1f-8d0f6541a07c req-59fdd736-457a-41ea-a2fd-82697fa98953 service nova] [instance: 434a8f2e-7c08-4b16-b255-45b168679f49] Received unexpected event network-vif-plugged-113411ee-a92b-4dc0-81a6-6bf425a9f7e9 for instance with vm_state building and task_state spawning. [ 2201.861635] env[68443]: DEBUG nova.network.neutron [None req-524a989f-9ccf-4947-8c47-163ec6fa483a tempest-ServersNegativeTestJSON-608941495 tempest-ServersNegativeTestJSON-608941495-project-member] [instance: 434a8f2e-7c08-4b16-b255-45b168679f49] Successfully updated port: 113411ee-a92b-4dc0-81a6-6bf425a9f7e9 {{(pid=68443) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2201.872638] env[68443]: DEBUG oslo_concurrency.lockutils [None req-524a989f-9ccf-4947-8c47-163ec6fa483a tempest-ServersNegativeTestJSON-608941495 tempest-ServersNegativeTestJSON-608941495-project-member] Acquiring lock "refresh_cache-434a8f2e-7c08-4b16-b255-45b168679f49" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2201.872785] env[68443]: DEBUG oslo_concurrency.lockutils [None req-524a989f-9ccf-4947-8c47-163ec6fa483a tempest-ServersNegativeTestJSON-608941495 tempest-ServersNegativeTestJSON-608941495-project-member] Acquired lock "refresh_cache-434a8f2e-7c08-4b16-b255-45b168679f49" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2201.873081] env[68443]: DEBUG nova.network.neutron [None req-524a989f-9ccf-4947-8c47-163ec6fa483a tempest-ServersNegativeTestJSON-608941495 tempest-ServersNegativeTestJSON-608941495-project-member] [instance: 434a8f2e-7c08-4b16-b255-45b168679f49] Building network info cache for instance {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2201.919814] env[68443]: DEBUG nova.network.neutron [None req-524a989f-9ccf-4947-8c47-163ec6fa483a tempest-ServersNegativeTestJSON-608941495 tempest-ServersNegativeTestJSON-608941495-project-member] [instance: 434a8f2e-7c08-4b16-b255-45b168679f49] Instance cache missing network info. 
{{(pid=68443) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2202.079196] env[68443]: DEBUG nova.network.neutron [None req-524a989f-9ccf-4947-8c47-163ec6fa483a tempest-ServersNegativeTestJSON-608941495 tempest-ServersNegativeTestJSON-608941495-project-member] [instance: 434a8f2e-7c08-4b16-b255-45b168679f49] Updating instance_info_cache with network_info: [{"id": "113411ee-a92b-4dc0-81a6-6bf425a9f7e9", "address": "fa:16:3e:ef:eb:f3", "network": {"id": "32e67042-b96a-4842-9799-4808013796a9", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1646896914-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "09247f20104e446784da3e31b3117a0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "411f389f-4e4f-4450-891e-38944cac6135", "external-id": "nsx-vlan-transportzone-795", "segmentation_id": 795, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap113411ee-a9", "ovs_interfaceid": "113411ee-a92b-4dc0-81a6-6bf425a9f7e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2202.091755] env[68443]: DEBUG oslo_concurrency.lockutils [None req-524a989f-9ccf-4947-8c47-163ec6fa483a tempest-ServersNegativeTestJSON-608941495 tempest-ServersNegativeTestJSON-608941495-project-member] Releasing lock "refresh_cache-434a8f2e-7c08-4b16-b255-45b168679f49" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2202.092147] env[68443]: DEBUG nova.compute.manager [None req-524a989f-9ccf-4947-8c47-163ec6fa483a tempest-ServersNegativeTestJSON-608941495 tempest-ServersNegativeTestJSON-608941495-project-member] [instance: 434a8f2e-7c08-4b16-b255-45b168679f49] Instance network_info: |[{"id": "113411ee-a92b-4dc0-81a6-6bf425a9f7e9", "address": "fa:16:3e:ef:eb:f3", "network": {"id": "32e67042-b96a-4842-9799-4808013796a9", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1646896914-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "09247f20104e446784da3e31b3117a0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "411f389f-4e4f-4450-891e-38944cac6135", "external-id": "nsx-vlan-transportzone-795", "segmentation_id": 795, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap113411ee-a9", "ovs_interfaceid": "113411ee-a92b-4dc0-81a6-6bf425a9f7e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68443) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1987}} [ 2202.092562] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-524a989f-9ccf-4947-8c47-163ec6fa483a tempest-ServersNegativeTestJSON-608941495 tempest-ServersNegativeTestJSON-608941495-project-member] [instance: 434a8f2e-7c08-4b16-b255-45b168679f49] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ef:eb:f3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '411f389f-4e4f-4450-891e-38944cac6135', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '113411ee-a92b-4dc0-81a6-6bf425a9f7e9', 'vif_model': 'vmxnet3'}] {{(pid=68443) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2202.101030] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-524a989f-9ccf-4947-8c47-163ec6fa483a tempest-ServersNegativeTestJSON-608941495 tempest-ServersNegativeTestJSON-608941495-project-member] Creating folder: Project (09247f20104e446784da3e31b3117a0b). Parent ref: group-v673136. {{(pid=68443) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2202.101030] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-75c83f4f-d1cc-4f31-90ad-fc8c5cbab296 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2202.113999] env[68443]: INFO nova.virt.vmwareapi.vm_util [None req-524a989f-9ccf-4947-8c47-163ec6fa483a tempest-ServersNegativeTestJSON-608941495 tempest-ServersNegativeTestJSON-608941495-project-member] Created folder: Project (09247f20104e446784da3e31b3117a0b) in parent group-v673136. [ 2202.114196] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-524a989f-9ccf-4947-8c47-163ec6fa483a tempest-ServersNegativeTestJSON-608941495 tempest-ServersNegativeTestJSON-608941495-project-member] Creating folder: Instances. Parent ref: group-v673239. {{(pid=68443) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2202.114414] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a877d91a-3cc8-4673-a843-b969f3461d15 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2202.123878] env[68443]: INFO nova.virt.vmwareapi.vm_util [None req-524a989f-9ccf-4947-8c47-163ec6fa483a tempest-ServersNegativeTestJSON-608941495 tempest-ServersNegativeTestJSON-608941495-project-member] Created folder: Instances in parent group-v673239. [ 2202.124117] env[68443]: DEBUG oslo.service.loopingcall [None req-524a989f-9ccf-4947-8c47-163ec6fa483a tempest-ServersNegativeTestJSON-608941495 tempest-ServersNegativeTestJSON-608941495-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2202.124302] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 434a8f2e-7c08-4b16-b255-45b168679f49] Creating VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2202.124492] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7a23f5b6-d699-4223-8d4a-6e4b2767ad27 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2202.142989] env[68443]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2202.142989] env[68443]: value = "task-3374086" [ 2202.142989] env[68443]: _type = "Task" [ 2202.142989] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2202.150217] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3374086, 'name': CreateVM_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2202.652614] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3374086, 'name': CreateVM_Task, 'duration_secs': 0.298136} completed successfully. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2202.652751] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 434a8f2e-7c08-4b16-b255-45b168679f49] Created VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2202.653407] env[68443]: DEBUG oslo_concurrency.lockutils [None req-524a989f-9ccf-4947-8c47-163ec6fa483a tempest-ServersNegativeTestJSON-608941495 tempest-ServersNegativeTestJSON-608941495-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2202.653577] env[68443]: DEBUG oslo_concurrency.lockutils [None req-524a989f-9ccf-4947-8c47-163ec6fa483a tempest-ServersNegativeTestJSON-608941495 tempest-ServersNegativeTestJSON-608941495-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2202.653896] env[68443]: DEBUG oslo_concurrency.lockutils [None req-524a989f-9ccf-4947-8c47-163ec6fa483a tempest-ServersNegativeTestJSON-608941495 tempest-ServersNegativeTestJSON-608941495-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2202.654176] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f3d6b18a-9014-4ee3-8159-6a87bb4a77d1 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2202.658311] env[68443]: DEBUG oslo_vmware.api [None req-524a989f-9ccf-4947-8c47-163ec6fa483a tempest-ServersNegativeTestJSON-608941495 tempest-ServersNegativeTestJSON-608941495-project-member] Waiting for the task: (returnval){ [ 2202.658311] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]527804ae-eb1c-a2ef-d0ce-78944e08e085" [ 2202.658311] env[68443]: _type = "Task" [ 2202.658311] env[68443]: } to complete. 
{{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2202.673850] env[68443]: DEBUG oslo_concurrency.lockutils [None req-524a989f-9ccf-4947-8c47-163ec6fa483a tempest-ServersNegativeTestJSON-608941495 tempest-ServersNegativeTestJSON-608941495-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2202.674093] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-524a989f-9ccf-4947-8c47-163ec6fa483a tempest-ServersNegativeTestJSON-608941495 tempest-ServersNegativeTestJSON-608941495-project-member] [instance: 434a8f2e-7c08-4b16-b255-45b168679f49] Processing image a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2202.674297] env[68443]: DEBUG oslo_concurrency.lockutils [None req-524a989f-9ccf-4947-8c47-163ec6fa483a tempest-ServersNegativeTestJSON-608941495 tempest-ServersNegativeTestJSON-608941495-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2203.869700] env[68443]: DEBUG nova.compute.manager [req-b8cd50da-676c-45d9-9eeb-046d26062ea9 req-8c518bbf-9dbe-4729-9236-f223a206ecf4 service nova] [instance: 434a8f2e-7c08-4b16-b255-45b168679f49] Received event network-changed-113411ee-a92b-4dc0-81a6-6bf425a9f7e9 {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2203.869986] env[68443]: DEBUG nova.compute.manager [req-b8cd50da-676c-45d9-9eeb-046d26062ea9 req-8c518bbf-9dbe-4729-9236-f223a206ecf4 service nova] [instance: 434a8f2e-7c08-4b16-b255-45b168679f49] Refreshing instance network info cache due to event network-changed-113411ee-a92b-4dc0-81a6-6bf425a9f7e9. {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 2203.870192] env[68443]: DEBUG oslo_concurrency.lockutils [req-b8cd50da-676c-45d9-9eeb-046d26062ea9 req-8c518bbf-9dbe-4729-9236-f223a206ecf4 service nova] Acquiring lock "refresh_cache-434a8f2e-7c08-4b16-b255-45b168679f49" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2203.870338] env[68443]: DEBUG oslo_concurrency.lockutils [req-b8cd50da-676c-45d9-9eeb-046d26062ea9 req-8c518bbf-9dbe-4729-9236-f223a206ecf4 service nova] Acquired lock "refresh_cache-434a8f2e-7c08-4b16-b255-45b168679f49" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2203.870501] env[68443]: DEBUG nova.network.neutron [req-b8cd50da-676c-45d9-9eeb-046d26062ea9 req-8c518bbf-9dbe-4729-9236-f223a206ecf4 service nova] [instance: 434a8f2e-7c08-4b16-b255-45b168679f49] Refreshing network info cache for port 113411ee-a92b-4dc0-81a6-6bf425a9f7e9 {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2204.127265] env[68443]: DEBUG nova.network.neutron [req-b8cd50da-676c-45d9-9eeb-046d26062ea9 req-8c518bbf-9dbe-4729-9236-f223a206ecf4 service nova] [instance: 434a8f2e-7c08-4b16-b255-45b168679f49] Updated VIF entry in instance network info cache for port 113411ee-a92b-4dc0-81a6-6bf425a9f7e9. 
{{(pid=68443) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2204.127626] env[68443]: DEBUG nova.network.neutron [req-b8cd50da-676c-45d9-9eeb-046d26062ea9 req-8c518bbf-9dbe-4729-9236-f223a206ecf4 service nova] [instance: 434a8f2e-7c08-4b16-b255-45b168679f49] Updating instance_info_cache with network_info: [{"id": "113411ee-a92b-4dc0-81a6-6bf425a9f7e9", "address": "fa:16:3e:ef:eb:f3", "network": {"id": "32e67042-b96a-4842-9799-4808013796a9", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1646896914-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "09247f20104e446784da3e31b3117a0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "411f389f-4e4f-4450-891e-38944cac6135", "external-id": "nsx-vlan-transportzone-795", "segmentation_id": 795, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap113411ee-a9", "ovs_interfaceid": "113411ee-a92b-4dc0-81a6-6bf425a9f7e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2204.137095] env[68443]: DEBUG oslo_concurrency.lockutils [req-b8cd50da-676c-45d9-9eeb-046d26062ea9 req-8c518bbf-9dbe-4729-9236-f223a206ecf4 service nova] Releasing lock "refresh_cache-434a8f2e-7c08-4b16-b255-45b168679f49" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2235.832793] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2235.833189] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2235.844896] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2235.845122] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2235.845302] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68443) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2235.845454] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68443) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2235.846539] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-878595ab-72c6-4f58-afa2-c2b4c4ca8bad {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.855304] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1248327-6ba0-4c40-a556-655b1771ef62 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.868961] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59af5921-b8db-41de-a3c0-2d614622b568 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.875069] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ae9661b-2e86-4652-bae1-ab773f46a807 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.904351] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180939MB free_disk=105GB free_vcpus=48 pci_devices=None {{(pid=68443) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2235.904501] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2235.904672] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2235.972669] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 062710e8-2ccb-4926-97ce-bf6a9fa4d10c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2235.972835] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 963d7f65-a761-4ce1-b6d1-fc987c3111c0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2235.972964] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 12b39079-051e-4997-9fa1-7e467af04306 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2235.973100] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 6333b256-471f-485d-b099-21fa82349319 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2235.973221] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance a077b8cb-2ae3-4a4b-b309-ca8dbca0e412 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2235.973338] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 75ba0bb9-0498-4434-aed1-b03aa0bcaf03 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2235.973455] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance da00322f-5482-4511-94a4-2e2f3705fb99 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2235.973568] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 83198ee3-dbb4-4088-b889-1aa9196f0b92 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2235.973685] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 434a8f2e-7c08-4b16-b255-45b168679f49 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2235.973874] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2235.974018] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2236.082911] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a697d47-a6fc-497f-8963-d0f74679b3a0 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.090765] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d069fad4-5575-489b-a5a4-750ec1711a23 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.120635] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e1742d3-3698-480d-a5d9-3fc0f260cb6d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.127551] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4d1342e-5471-454e-92b7-f1f52cdcc8ad {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.140356] env[68443]: DEBUG nova.compute.provider_tree [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2236.148363] env[68443]: DEBUG nova.scheduler.client.report [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2236.161495] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68443) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2236.161675] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.257s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2238.154679] env[68443]: DEBUG oslo_service.periodic_task [None 
req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2238.826031] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2238.826031] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68443) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 2239.826090] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2239.826553] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Starting heal instance info cache {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 2239.826553] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Rebuilding the list of instances to heal {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 2239.846052] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2239.846052] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2239.846052] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2239.846052] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 6333b256-471f-485d-b099-21fa82349319] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2239.846052] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2239.846285] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Skipping network cache update for instance because it is Building. 
{{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2239.846285] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2239.846285] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 83198ee3-dbb4-4088-b889-1aa9196f0b92] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2239.846285] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 434a8f2e-7c08-4b16-b255-45b168679f49] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2239.846285] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Didn't find any instances for network info cache update. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 2242.651291] env[68443]: WARNING oslo_vmware.rw_handles [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2242.651291] env[68443]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2242.651291] env[68443]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2242.651291] env[68443]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2242.651291] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2242.651291] env[68443]: ERROR oslo_vmware.rw_handles response.begin() [ 2242.651291] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2242.651291] env[68443]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2242.651291] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2242.651291] env[68443]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2242.651291] env[68443]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2242.651291] env[68443]: ERROR oslo_vmware.rw_handles [ 2242.651937] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Downloaded image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to vmware_temp/5aad63f8-b384-4aba-bc4e-1c3b47f9f2c1/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2242.653832] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 
062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Caching image {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2242.654091] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Copying Virtual Disk [datastore1] vmware_temp/5aad63f8-b384-4aba-bc4e-1c3b47f9f2c1/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk to [datastore1] vmware_temp/5aad63f8-b384-4aba-bc4e-1c3b47f9f2c1/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk {{(pid=68443) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2242.654370] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a13a26ce-7c21-463b-a4cf-a0c6e517b0ba {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2242.661989] env[68443]: DEBUG oslo_vmware.api [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Waiting for the task: (returnval){ [ 2242.661989] env[68443]: value = "task-3374087" [ 2242.661989] env[68443]: _type = "Task" [ 2242.661989] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2242.669535] env[68443]: DEBUG oslo_vmware.api [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Task: {'id': task-3374087, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2242.824414] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2243.171959] env[68443]: DEBUG oslo_vmware.exceptions [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Fault InvalidArgument not matched. 
{{(pid=68443) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2243.172335] env[68443]: DEBUG oslo_concurrency.lockutils [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2243.172902] env[68443]: ERROR nova.compute.manager [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2243.172902] env[68443]: Faults: ['InvalidArgument'] [ 2243.172902] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Traceback (most recent call last): [ 2243.172902] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2243.172902] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] yield resources [ 2243.172902] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2243.172902] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] self.driver.spawn(context, instance, image_meta, [ 2243.172902] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2243.172902] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2243.172902] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2243.172902] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] self._fetch_image_if_missing(context, vi) [ 2243.172902] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2243.173260] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] image_cache(vi, tmp_image_ds_loc) [ 2243.173260] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2243.173260] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] vm_util.copy_virtual_disk( [ 2243.173260] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2243.173260] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] session._wait_for_task(vmdk_copy_task) [ 2243.173260] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, 
in _wait_for_task [ 2243.173260] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] return self.wait_for_task(task_ref) [ 2243.173260] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2243.173260] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] return evt.wait() [ 2243.173260] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2243.173260] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] result = hub.switch() [ 2243.173260] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2243.173260] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] return self.greenlet.switch() [ 2243.173605] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2243.173605] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] self.f(*self.args, **self.kw) [ 2243.173605] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2243.173605] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] raise exceptions.translate_fault(task_info.error) [ 2243.173605] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2243.173605] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Faults: ['InvalidArgument'] [ 2243.173605] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] [ 2243.173605] env[68443]: INFO nova.compute.manager [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Terminating instance [ 2243.174804] env[68443]: DEBUG oslo_concurrency.lockutils [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2243.175021] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2243.175271] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-23456f1b-7525-438d-aa49-f2269ef3be34 {{(pid=68443) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2243.177499] env[68443]: DEBUG oslo_concurrency.lockutils [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Acquiring lock "refresh_cache-062710e8-2ccb-4926-97ce-bf6a9fa4d10c" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2243.177662] env[68443]: DEBUG oslo_concurrency.lockutils [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Acquired lock "refresh_cache-062710e8-2ccb-4926-97ce-bf6a9fa4d10c" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2243.177828] env[68443]: DEBUG nova.network.neutron [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Building network info cache for instance {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2243.184783] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2243.184956] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68443) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2243.186138] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1160404f-ae97-4ef1-8148-5e8161f95551 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2243.193578] env[68443]: DEBUG oslo_vmware.api [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Waiting for the task: (returnval){ [ 2243.193578] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]52062284-e87b-b2ee-c297-cbde137cb735" [ 2243.193578] env[68443]: _type = "Task" [ 2243.193578] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2243.201563] env[68443]: DEBUG oslo_vmware.api [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]52062284-e87b-b2ee-c297-cbde137cb735, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2243.207331] env[68443]: DEBUG nova.network.neutron [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Instance cache missing network info. 
{{(pid=68443) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2243.270197] env[68443]: DEBUG nova.network.neutron [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2243.279091] env[68443]: DEBUG oslo_concurrency.lockutils [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Releasing lock "refresh_cache-062710e8-2ccb-4926-97ce-bf6a9fa4d10c" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2243.279503] env[68443]: DEBUG nova.compute.manager [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2243.279701] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2243.280732] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0744ee9f-6833-499b-a7e8-2595a6f12c51 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2243.288330] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Unregistering the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2243.288540] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-602f30e9-33eb-4e1c-a434-92e2cc7fbf96 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2243.316690] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Unregistered the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2243.316906] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Deleting contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2243.317102] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Deleting the datastore file [datastore1] 062710e8-2ccb-4926-97ce-bf6a9fa4d10c {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2243.317350] env[68443]: 
DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-33a951c0-7dd1-40d6-a540-48d037de7852 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2243.323606] env[68443]: DEBUG oslo_vmware.api [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Waiting for the task: (returnval){ [ 2243.323606] env[68443]: value = "task-3374089" [ 2243.323606] env[68443]: _type = "Task" [ 2243.323606] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2243.330803] env[68443]: DEBUG oslo_vmware.api [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Task: {'id': task-3374089, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2243.704052] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Preparing fetch location {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2243.704380] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Creating directory with path [datastore1] vmware_temp/ba5bc9e3-855b-4fc2-8660-57af8cafefc1/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2243.704594] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dc598c0a-6f91-4180-ade9-9b599fdd6610 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2243.715360] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Created directory with path [datastore1] vmware_temp/ba5bc9e3-855b-4fc2-8660-57af8cafefc1/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2243.715608] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Fetch image to [datastore1] vmware_temp/ba5bc9e3-855b-4fc2-8660-57af8cafefc1/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2243.715834] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to [datastore1] vmware_temp/ba5bc9e3-855b-4fc2-8660-57af8cafefc1/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2243.716559] env[68443]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2beae707-c0fe-449f-a816-2a02c10162b7 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2243.723021] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b40de18-e296-40d1-94d9-a570433fd7cc {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2243.731712] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5349be5b-8de9-4eff-80a9-81bfd02d89ec {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2243.762111] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1928c645-bb26-49f6-9f7a-0d517c532071 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2243.767483] env[68443]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-01ec20f0-7f50-45ec-9001-8c0f71acd2ab {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2243.786728] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2243.832384] env[68443]: DEBUG oslo_vmware.api [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Task: {'id': task-3374089, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.030709} completed successfully. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2243.832623] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Deleted the datastore file {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2243.832808] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Deleted contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2243.832980] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2243.833230] env[68443]: INFO nova.compute.manager [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Took 0.55 seconds to destroy the instance on the hypervisor. 
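The repeated "Invoking ...Task" / "Waiting for the task" / "progress is 0%" records above follow the oslo.vmware asynchronous-task pattern: an async vSphere method returns a Task reference, which the session then polls until it completes or faults. A minimal sketch of that pattern follows; the vCenter host, credentials and datastore paths are placeholders and not values from this log, and this is an illustrative approximation rather than the Nova code that emitted these records.

# Minimal sketch (assumed values) of invoking an async vSphere task and
# polling it with oslo.vmware, the pattern behind the DEBUG records above.
from oslo_vmware import api

session = api.VMwareAPISession(
    'vc.example.test',   # placeholder vCenter host
    'user', 'secret',    # placeholder credentials
    api_retry_count=3,
    task_poll_interval=0.5)

disk_mgr = session.vim.service_content.virtualDiskManager
# CopyVirtualDisk_Task returns a Task managed-object reference immediately.
task_ref = session.invoke_api(
    session.vim, 'CopyVirtualDisk_Task', disk_mgr,
    sourceName='[datastore1] vmware_temp/example/tmp-sparse.vmdk',
    destName='[datastore1] vmware_temp/example/example.vmdk')

# wait_for_task polls the task (the "progress is N%" records) and raises a
# translated VimFaultException if the task errors, e.g. the InvalidArgument
# "fileType" fault shown in the traceback above.
task_info = session.wait_for_task(task_ref)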
[ 2243.833408] env[68443]: DEBUG oslo.service.loopingcall [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2243.833630] env[68443]: DEBUG nova.compute.manager [-] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Skipping network deallocation for instance since networking was not requested. {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 2243.835855] env[68443]: DEBUG nova.compute.claims [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Aborting claim: {{(pid=68443) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2243.836263] env[68443]: DEBUG oslo_concurrency.lockutils [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2243.836263] env[68443]: DEBUG oslo_concurrency.lockutils [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2243.921144] env[68443]: DEBUG oslo_concurrency.lockutils [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2243.921985] env[68443]: ERROR nova.compute.manager [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Instance failed to spawn: nova.exception.ImageNotAuthorized: Not authorized for image a80a8b97-4d56-4702-9b02-9d115fcd6710. 
[ 2243.921985] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Traceback (most recent call last): [ 2243.921985] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 2243.921985] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 2243.921985] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 2243.921985] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] result = getattr(controller, method)(*args, **kwargs) [ 2243.921985] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 2243.921985] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] return self._get(image_id) [ 2243.921985] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 2243.921985] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] return RequestIdProxy(wrapped(*args, **kwargs)) [ 2243.921985] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 2243.922331] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] resp, body = self.http_client.get(url, headers=header) [ 2243.922331] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 2243.922331] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] return self.request(url, 'GET', **kwargs) [ 2243.922331] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 2243.922331] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] return self._handle_response(resp) [ 2243.922331] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 2243.922331] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] raise exc.from_response(resp, resp.content) [ 2243.922331] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 2243.922331] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] [ 2243.922331] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] During handling of the above exception, another exception occurred: [ 2243.922331] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] [ 2243.922331] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Traceback (most recent call last): [ 2243.922679] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2243.922679] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] yield resources [ 2243.922679] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2243.922679] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] self.driver.spawn(context, instance, image_meta, [ 2243.922679] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2243.922679] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2243.922679] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2243.922679] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] self._fetch_image_if_missing(context, vi) [ 2243.922679] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 2243.922679] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] image_fetch(context, vi, tmp_image_ds_loc) [ 2243.922679] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 2243.922679] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] images.fetch_image( [ 2243.922679] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 2243.923102] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] metadata = IMAGE_API.get(context, image_ref) [ 2243.923102] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 2243.923102] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] return session.show(context, image_id, [ 2243.923102] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 2243.923102] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] _reraise_translated_image_exception(image_id) [ 2243.923102] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File 
"/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 2243.923102] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] raise new_exc.with_traceback(exc_trace) [ 2243.923102] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 2243.923102] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 2243.923102] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 2243.923102] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] result = getattr(controller, method)(*args, **kwargs) [ 2243.923102] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 2243.923102] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] return self._get(image_id) [ 2243.923456] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 2243.923456] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] return RequestIdProxy(wrapped(*args, **kwargs)) [ 2243.923456] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 2243.923456] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] resp, body = self.http_client.get(url, headers=header) [ 2243.923456] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 2243.923456] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] return self.request(url, 'GET', **kwargs) [ 2243.923456] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 2243.923456] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] return self._handle_response(resp) [ 2243.923456] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 2243.923456] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] raise exc.from_response(resp, resp.content) [ 2243.923456] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] nova.exception.ImageNotAuthorized: Not authorized for image a80a8b97-4d56-4702-9b02-9d115fcd6710. 
[ 2243.923456] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] [ 2243.923812] env[68443]: INFO nova.compute.manager [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Terminating instance [ 2243.923812] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2243.924388] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2243.924879] env[68443]: DEBUG oslo_concurrency.lockutils [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Acquiring lock "refresh_cache-963d7f65-a761-4ce1-b6d1-fc987c3111c0" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2243.925047] env[68443]: DEBUG oslo_concurrency.lockutils [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Acquired lock "refresh_cache-963d7f65-a761-4ce1-b6d1-fc987c3111c0" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2243.925220] env[68443]: DEBUG nova.network.neutron [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Building network info cache for instance {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2243.926102] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-49890dff-20c1-4e03-bfc8-aedad3212929 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2243.937378] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2243.937560] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68443) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2243.938992] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c58bb677-1137-4fba-a45b-32487dbf867a {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2243.946187] env[68443]: DEBUG oslo_vmware.api [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Waiting for the task: (returnval){ [ 2243.946187] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]528369c3-1eb8-bf7a-48d5-24b1e840dc31" [ 2243.946187] env[68443]: _type = "Task" [ 2243.946187] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2243.954114] env[68443]: DEBUG oslo_vmware.api [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]528369c3-1eb8-bf7a-48d5-24b1e840dc31, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2243.957101] env[68443]: DEBUG nova.network.neutron [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Instance cache missing network info. {{(pid=68443) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2243.989132] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d802d1fe-056c-45f9-86b7-34d73ee2f127 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2243.995663] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-274dd5c8-a4ad-44b8-adf9-ca9272a7009d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2244.026162] env[68443]: DEBUG nova.network.neutron [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2244.028225] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-613d3c2c-728e-4d36-8814-383ec4ded06e {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2244.035419] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06a7d2a9-81f8-4634-8b9e-854f91615dea {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2244.796727] env[68443]: DEBUG oslo_concurrency.lockutils [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Releasing lock "refresh_cache-963d7f65-a761-4ce1-b6d1-fc987c3111c0" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 
2244.797163] env[68443]: DEBUG nova.compute.manager [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2244.797364] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2244.801655] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f436e035-d166-4e2c-b6d6-993b4915a781 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2244.811797] env[68443]: DEBUG nova.compute.provider_tree [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2244.817707] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Unregistering the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2244.820436] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-122701e6-ee33-4cf9-81e4-9ac69cf49a80 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2244.821897] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2244.822789] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Preparing fetch location {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2244.823016] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Creating directory with path [datastore1] vmware_temp/f9a17836-64ed-4685-89db-324ffb59f2ef/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2244.823748] env[68443]: DEBUG nova.scheduler.client.report [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2244.826724] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b1618415-714d-47f0-8434-3a68c9b6fa17 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2244.837787] env[68443]: DEBUG oslo_concurrency.lockutils [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.001s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2244.838317] env[68443]: ERROR nova.compute.manager [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2244.838317] env[68443]: Faults: ['InvalidArgument'] [ 2244.838317] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Traceback (most recent call last): [ 2244.838317] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2244.838317] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] self.driver.spawn(context, instance, image_meta, [ 2244.838317] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2244.838317] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2244.838317] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2244.838317] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] self._fetch_image_if_missing(context, vi) [ 2244.838317] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2244.838317] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] image_cache(vi, tmp_image_ds_loc) [ 2244.838317] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2244.838777] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] vm_util.copy_virtual_disk( [ 2244.838777] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2244.838777] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] session._wait_for_task(vmdk_copy_task) [ 2244.838777] env[68443]: ERROR nova.compute.manager [instance: 
062710e8-2ccb-4926-97ce-bf6a9fa4d10c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2244.838777] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] return self.wait_for_task(task_ref) [ 2244.838777] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2244.838777] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] return evt.wait() [ 2244.838777] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2244.838777] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] result = hub.switch() [ 2244.838777] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2244.838777] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] return self.greenlet.switch() [ 2244.838777] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2244.838777] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] self.f(*self.args, **self.kw) [ 2244.839299] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2244.839299] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] raise exceptions.translate_fault(task_info.error) [ 2244.839299] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2244.839299] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Faults: ['InvalidArgument'] [ 2244.839299] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] [ 2244.839299] env[68443]: DEBUG nova.compute.utils [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] VimFaultException {{(pid=68443) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2244.840840] env[68443]: DEBUG nova.compute.manager [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Build of instance 062710e8-2ccb-4926-97ce-bf6a9fa4d10c was re-scheduled: A specified parameter was not correct: fileType [ 2244.840840] env[68443]: Faults: ['InvalidArgument'] {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2244.843809] env[68443]: DEBUG nova.compute.manager [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Unplugging VIFs for instance {{(pid=68443) _cleanup_allocated_networks 
/opt/stack/nova/nova/compute/manager.py:2997}} [ 2244.843809] env[68443]: DEBUG oslo_concurrency.lockutils [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Acquiring lock "refresh_cache-062710e8-2ccb-4926-97ce-bf6a9fa4d10c" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2244.843809] env[68443]: DEBUG oslo_concurrency.lockutils [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Acquired lock "refresh_cache-062710e8-2ccb-4926-97ce-bf6a9fa4d10c" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2244.843809] env[68443]: DEBUG nova.network.neutron [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Building network info cache for instance {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2244.848684] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Created directory with path [datastore1] vmware_temp/f9a17836-64ed-4685-89db-324ffb59f2ef/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2244.848880] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Fetch image to [datastore1] vmware_temp/f9a17836-64ed-4685-89db-324ffb59f2ef/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2244.849068] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to [datastore1] vmware_temp/f9a17836-64ed-4685-89db-324ffb59f2ef/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2244.849857] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6aa2583-1aed-426a-90b6-e16c7a2708c6 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2244.859711] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09a0f44f-3ef5-4f5d-a155-27b0880093b1 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2244.862036] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Unregistered the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2244.862265] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None 
req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Deleting contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2244.862459] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Deleting the datastore file [datastore1] 963d7f65-a761-4ce1-b6d1-fc987c3111c0 {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2244.862688] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-efee8a7f-b310-4466-8146-578b96d21ee7 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2244.872097] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20fcc348-1d34-404e-82fe-df28d71b39ba {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2244.876011] env[68443]: DEBUG oslo_vmware.api [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Waiting for the task: (returnval){ [ 2244.876011] env[68443]: value = "task-3374091" [ 2244.876011] env[68443]: _type = "Task" [ 2244.876011] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2244.876757] env[68443]: DEBUG nova.network.neutron [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Instance cache missing network info. {{(pid=68443) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2244.911631] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5bd20e3-c92f-4318-bf57-d8a0fc772c09 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2244.917048] env[68443]: DEBUG oslo_vmware.api [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Task: {'id': task-3374091, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.035827} completed successfully. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2244.917578] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Deleted the datastore file {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2244.917764] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Deleted contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2244.917932] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2244.918117] env[68443]: INFO nova.compute.manager [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Took 0.12 seconds to destroy the instance on the hypervisor. [ 2244.918355] env[68443]: DEBUG oslo.service.loopingcall [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2244.918639] env[68443]: DEBUG nova.compute.manager [-] [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Skipping network deallocation for instance since networking was not requested. 
{{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 2244.921922] env[68443]: DEBUG nova.compute.claims [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Aborting claim: {{(pid=68443) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2244.922102] env[68443]: DEBUG oslo_concurrency.lockutils [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2244.922334] env[68443]: DEBUG oslo_concurrency.lockutils [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2244.925564] env[68443]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ffec0f4d-4f1b-4ee1-aba7-380627c81350 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2244.949343] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2244.994491] env[68443]: DEBUG oslo_vmware.rw_handles [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f9a17836-64ed-4685-89db-324ffb59f2ef/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68443) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2245.053622] env[68443]: DEBUG nova.network.neutron [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2245.060606] env[68443]: DEBUG oslo_vmware.rw_handles [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Completed reading data from the image iterator. 
{{(pid=68443) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2245.060606] env[68443]: DEBUG oslo_vmware.rw_handles [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f9a17836-64ed-4685-89db-324ffb59f2ef/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68443) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2245.063240] env[68443]: DEBUG oslo_concurrency.lockutils [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Releasing lock "refresh_cache-062710e8-2ccb-4926-97ce-bf6a9fa4d10c" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2245.063443] env[68443]: DEBUG nova.compute.manager [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2245.063650] env[68443]: DEBUG nova.compute.manager [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Skipping network deallocation for instance since networking was not requested. {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 2245.156158] env[68443]: INFO nova.scheduler.client.report [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Deleted allocations for instance 062710e8-2ccb-4926-97ce-bf6a9fa4d10c [ 2245.172741] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b38a8efd-0e73-4dc4-8b38-dacd2ff5f051 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.180930] env[68443]: DEBUG oslo_concurrency.lockutils [None req-57719c5f-d70b-4fda-9008-f8dabc83cd4a tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Lock "062710e8-2ccb-4926-97ce-bf6a9fa4d10c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 637.020s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2245.181909] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de038b89-745b-45aa-8d21-4dd6f5e29113 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.184963] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "062710e8-2ccb-4926-97ce-bf6a9fa4d10c" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 351.103s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2245.185257] env[68443]: INFO nova.compute.manager [None 
req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] During sync_power_state the instance has a pending task (spawning). Skip. [ 2245.185353] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "062710e8-2ccb-4926-97ce-bf6a9fa4d10c" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2245.185838] env[68443]: DEBUG oslo_concurrency.lockutils [None req-9ed7767f-acef-4e90-8d86-1d492102e381 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Lock "062710e8-2ccb-4926-97ce-bf6a9fa4d10c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 244.215s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2245.186075] env[68443]: DEBUG oslo_concurrency.lockutils [None req-9ed7767f-acef-4e90-8d86-1d492102e381 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Acquiring lock "062710e8-2ccb-4926-97ce-bf6a9fa4d10c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2245.186279] env[68443]: DEBUG oslo_concurrency.lockutils [None req-9ed7767f-acef-4e90-8d86-1d492102e381 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Lock "062710e8-2ccb-4926-97ce-bf6a9fa4d10c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2245.186431] env[68443]: DEBUG oslo_concurrency.lockutils [None req-9ed7767f-acef-4e90-8d86-1d492102e381 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Lock "062710e8-2ccb-4926-97ce-bf6a9fa4d10c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2245.188582] env[68443]: INFO nova.compute.manager [None req-9ed7767f-acef-4e90-8d86-1d492102e381 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Terminating instance [ 2245.191080] env[68443]: DEBUG oslo_concurrency.lockutils [None req-9ed7767f-acef-4e90-8d86-1d492102e381 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Acquiring lock "refresh_cache-062710e8-2ccb-4926-97ce-bf6a9fa4d10c" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2245.191261] env[68443]: DEBUG oslo_concurrency.lockutils [None req-9ed7767f-acef-4e90-8d86-1d492102e381 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Acquired lock "refresh_cache-062710e8-2ccb-4926-97ce-bf6a9fa4d10c" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2245.191490] env[68443]: DEBUG nova.network.neutron [None req-9ed7767f-acef-4e90-8d86-1d492102e381 tempest-ServerShowV247Test-182818576 
tempest-ServerShowV247Test-182818576-project-member] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Building network info cache for instance {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2245.218588] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12c565cf-3c00-4f63-8498-13a70e14d881 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.226413] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8be87d36-c79e-4d92-abaa-5a69b6ecefa5 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.241803] env[68443]: DEBUG nova.compute.provider_tree [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2245.243312] env[68443]: DEBUG nova.network.neutron [None req-9ed7767f-acef-4e90-8d86-1d492102e381 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Instance cache missing network info. {{(pid=68443) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2245.252181] env[68443]: DEBUG nova.scheduler.client.report [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2245.268551] env[68443]: DEBUG oslo_concurrency.lockutils [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.346s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2245.269424] env[68443]: ERROR nova.compute.manager [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Failed to build and run instance: nova.exception.ImageNotAuthorized: Not authorized for image a80a8b97-4d56-4702-9b02-9d115fcd6710. 
[ 2245.269424] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Traceback (most recent call last): [ 2245.269424] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 2245.269424] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 2245.269424] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 2245.269424] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] result = getattr(controller, method)(*args, **kwargs) [ 2245.269424] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 2245.269424] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] return self._get(image_id) [ 2245.269424] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 2245.269424] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] return RequestIdProxy(wrapped(*args, **kwargs)) [ 2245.269424] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 2245.269716] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] resp, body = self.http_client.get(url, headers=header) [ 2245.269716] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 2245.269716] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] return self.request(url, 'GET', **kwargs) [ 2245.269716] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 2245.269716] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] return self._handle_response(resp) [ 2245.269716] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 2245.269716] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] raise exc.from_response(resp, resp.content) [ 2245.269716] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 2245.269716] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] [ 2245.269716] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] During handling of the above exception, another exception occurred: [ 2245.269716] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] [ 2245.269716] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Traceback (most recent call last): [ 2245.269970] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2245.269970] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] self.driver.spawn(context, instance, image_meta, [ 2245.269970] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2245.269970] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2245.269970] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2245.269970] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] self._fetch_image_if_missing(context, vi) [ 2245.269970] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 2245.269970] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] image_fetch(context, vi, tmp_image_ds_loc) [ 2245.269970] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 2245.269970] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] images.fetch_image( [ 2245.269970] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 2245.269970] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] metadata = IMAGE_API.get(context, image_ref) [ 2245.269970] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 2245.270259] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] return session.show(context, image_id, [ 2245.270259] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 2245.270259] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] _reraise_translated_image_exception(image_id) [ 2245.270259] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 2245.270259] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] raise new_exc.with_traceback(exc_trace) [ 2245.270259] env[68443]: ERROR nova.compute.manager [instance: 
963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 2245.270259] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 2245.270259] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 2245.270259] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] result = getattr(controller, method)(*args, **kwargs) [ 2245.270259] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 2245.270259] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] return self._get(image_id) [ 2245.270259] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 2245.270259] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] return RequestIdProxy(wrapped(*args, **kwargs)) [ 2245.270556] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 2245.270556] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] resp, body = self.http_client.get(url, headers=header) [ 2245.270556] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 2245.270556] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] return self.request(url, 'GET', **kwargs) [ 2245.270556] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 2245.270556] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] return self._handle_response(resp) [ 2245.270556] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 2245.270556] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] raise exc.from_response(resp, resp.content) [ 2245.270556] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] nova.exception.ImageNotAuthorized: Not authorized for image a80a8b97-4d56-4702-9b02-9d115fcd6710. [ 2245.270556] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] [ 2245.270556] env[68443]: DEBUG nova.compute.utils [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Not authorized for image a80a8b97-4d56-4702-9b02-9d115fcd6710. 
{{(pid=68443) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2245.271683] env[68443]: DEBUG nova.compute.manager [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Build of instance 963d7f65-a761-4ce1-b6d1-fc987c3111c0 was re-scheduled: Not authorized for image a80a8b97-4d56-4702-9b02-9d115fcd6710. {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2245.272144] env[68443]: DEBUG nova.compute.manager [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Unplugging VIFs for instance {{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2245.272367] env[68443]: DEBUG oslo_concurrency.lockutils [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Acquiring lock "refresh_cache-963d7f65-a761-4ce1-b6d1-fc987c3111c0" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2245.272815] env[68443]: DEBUG oslo_concurrency.lockutils [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Acquired lock "refresh_cache-963d7f65-a761-4ce1-b6d1-fc987c3111c0" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2245.272815] env[68443]: DEBUG nova.network.neutron [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Building network info cache for instance {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2245.301204] env[68443]: DEBUG nova.network.neutron [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Instance cache missing network info. {{(pid=68443) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2245.306954] env[68443]: DEBUG oslo_concurrency.lockutils [None req-6d0dbd4c-415c-45d6-bb9c-694694a324e2 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Acquiring lock "d19509cf-7828-4e55-bf2b-4c57b9eab217" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2245.307198] env[68443]: DEBUG oslo_concurrency.lockutils [None req-6d0dbd4c-415c-45d6-bb9c-694694a324e2 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Lock "d19509cf-7828-4e55-bf2b-4c57b9eab217" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2245.317673] env[68443]: DEBUG nova.compute.manager [None req-6d0dbd4c-415c-45d6-bb9c-694694a324e2 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: d19509cf-7828-4e55-bf2b-4c57b9eab217] Starting instance... 
{{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2245.335503] env[68443]: DEBUG nova.network.neutron [None req-9ed7767f-acef-4e90-8d86-1d492102e381 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2245.344525] env[68443]: DEBUG oslo_concurrency.lockutils [None req-9ed7767f-acef-4e90-8d86-1d492102e381 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Releasing lock "refresh_cache-062710e8-2ccb-4926-97ce-bf6a9fa4d10c" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2245.344866] env[68443]: DEBUG nova.compute.manager [None req-9ed7767f-acef-4e90-8d86-1d492102e381 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2245.345312] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-9ed7767f-acef-4e90-8d86-1d492102e381 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2245.345826] env[68443]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d4b6e89d-1af2-4863-95eb-eb935d2758c5 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.354883] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce549c8f-f8e9-4ff3-a256-c48ee9ce157b {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.386721] env[68443]: WARNING nova.virt.vmwareapi.vmops [None req-9ed7767f-acef-4e90-8d86-1d492102e381 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 062710e8-2ccb-4926-97ce-bf6a9fa4d10c could not be found. [ 2245.386935] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-9ed7767f-acef-4e90-8d86-1d492102e381 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2245.387133] env[68443]: INFO nova.compute.manager [None req-9ed7767f-acef-4e90-8d86-1d492102e381 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2245.387379] env[68443]: DEBUG oslo.service.loopingcall [None req-9ed7767f-acef-4e90-8d86-1d492102e381 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2245.388250] env[68443]: DEBUG nova.network.neutron [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2245.389987] env[68443]: DEBUG oslo_concurrency.lockutils [None req-6d0dbd4c-415c-45d6-bb9c-694694a324e2 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2245.390156] env[68443]: DEBUG oslo_concurrency.lockutils [None req-6d0dbd4c-415c-45d6-bb9c-694694a324e2 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2245.391563] env[68443]: INFO nova.compute.claims [None req-6d0dbd4c-415c-45d6-bb9c-694694a324e2 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: d19509cf-7828-4e55-bf2b-4c57b9eab217] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2245.395084] env[68443]: DEBUG nova.compute.manager [-] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2245.395171] env[68443]: DEBUG nova.network.neutron [-] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2245.397648] env[68443]: DEBUG oslo_concurrency.lockutils [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Releasing lock "refresh_cache-963d7f65-a761-4ce1-b6d1-fc987c3111c0" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2245.397848] env[68443]: DEBUG nova.compute.manager [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2245.398037] env[68443]: DEBUG nova.compute.manager [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Skipping network deallocation for instance since networking was not requested. 
{{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 2245.487749] env[68443]: INFO nova.scheduler.client.report [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Deleted allocations for instance 963d7f65-a761-4ce1-b6d1-fc987c3111c0 [ 2245.505331] env[68443]: DEBUG oslo_concurrency.lockutils [None req-bb0e9f67-5a49-4103-a9b5-8a0fcd473d40 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Lock "963d7f65-a761-4ce1-b6d1-fc987c3111c0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 637.164s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2245.507022] env[68443]: DEBUG oslo_concurrency.lockutils [None req-d818ae68-d6a3-4cb6-b27f-51c2792df050 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Lock "963d7f65-a761-4ce1-b6d1-fc987c3111c0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 440.536s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2245.507022] env[68443]: DEBUG oslo_concurrency.lockutils [None req-d818ae68-d6a3-4cb6-b27f-51c2792df050 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Acquiring lock "963d7f65-a761-4ce1-b6d1-fc987c3111c0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2245.507022] env[68443]: DEBUG oslo_concurrency.lockutils [None req-d818ae68-d6a3-4cb6-b27f-51c2792df050 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Lock "963d7f65-a761-4ce1-b6d1-fc987c3111c0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2245.507022] env[68443]: DEBUG oslo_concurrency.lockutils [None req-d818ae68-d6a3-4cb6-b27f-51c2792df050 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Lock "963d7f65-a761-4ce1-b6d1-fc987c3111c0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2245.508135] env[68443]: INFO nova.compute.manager [None req-d818ae68-d6a3-4cb6-b27f-51c2792df050 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Terminating instance [ 2245.509687] env[68443]: DEBUG oslo_concurrency.lockutils [None req-d818ae68-d6a3-4cb6-b27f-51c2792df050 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Acquiring lock "refresh_cache-963d7f65-a761-4ce1-b6d1-fc987c3111c0" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2245.509790] env[68443]: DEBUG oslo_concurrency.lockutils [None req-d818ae68-d6a3-4cb6-b27f-51c2792df050 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Acquired lock "refresh_cache-963d7f65-a761-4ce1-b6d1-fc987c3111c0" 
{{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2245.509955] env[68443]: DEBUG nova.network.neutron [None req-d818ae68-d6a3-4cb6-b27f-51c2792df050 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Building network info cache for instance {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2245.536772] env[68443]: DEBUG nova.network.neutron [None req-d818ae68-d6a3-4cb6-b27f-51c2792df050 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Instance cache missing network info. {{(pid=68443) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2245.541023] env[68443]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=68443) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 2245.541276] env[68443]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2245.541769] env[68443]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2245.541769] env[68443]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 2245.541769] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2245.541769] env[68443]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2245.541769] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2245.541769] env[68443]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 2245.541769] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2245.541769] env[68443]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 2245.541769] env[68443]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2245.541769] env[68443]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-182aa2f6-a5e3-4ce2-b47d-11a8244813b9'] [ 2245.541769] env[68443]: ERROR oslo.service.loopingcall [ 2245.541769] env[68443]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 2245.541769] env[68443]: ERROR oslo.service.loopingcall [ 2245.541769] env[68443]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 2245.541769] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2245.541769] env[68443]: ERROR 
oslo.service.loopingcall result = func(*self.args, **self.kw) [ 2245.542222] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2245.542222] env[68443]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 2245.542222] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 2245.542222] env[68443]: ERROR oslo.service.loopingcall self._deallocate_network( [ 2245.542222] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 2245.542222] env[68443]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 2245.542222] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2245.542222] env[68443]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 2245.542222] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2245.542222] env[68443]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2245.542222] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2245.542222] env[68443]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 2245.542222] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2245.542222] env[68443]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2245.542222] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2245.542222] env[68443]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 2245.542222] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2245.542222] env[68443]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 2245.542716] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2245.542716] env[68443]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2245.542716] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2245.542716] env[68443]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 2245.542716] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2245.542716] env[68443]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2245.542716] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2245.542716] env[68443]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 2245.542716] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2245.542716] env[68443]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2245.542716] env[68443]: ERROR oslo.service.loopingcall File 
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2245.542716] env[68443]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 2245.542716] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2245.542716] env[68443]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2245.542716] env[68443]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2245.542716] env[68443]: ERROR oslo.service.loopingcall [ 2245.543162] env[68443]: ERROR nova.compute.manager [None req-9ed7767f-acef-4e90-8d86-1d492102e381 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2245.570349] env[68443]: ERROR nova.compute.manager [None req-9ed7767f-acef-4e90-8d86-1d492102e381 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2245.570349] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Traceback (most recent call last): [ 2245.570349] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2245.570349] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] ret = obj(*args, **kwargs) [ 2245.570349] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2245.570349] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] exception_handler_v20(status_code, error_body) [ 2245.570349] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2245.570349] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] raise client_exc(message=error_message, [ 2245.570349] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2245.570349] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Neutron server returns request_ids: ['req-182aa2f6-a5e3-4ce2-b47d-11a8244813b9'] [ 2245.570349] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] [ 2245.570672] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] During handling of the above exception, another exception occurred: [ 2245.570672] env[68443]: ERROR nova.compute.manager [instance: 
062710e8-2ccb-4926-97ce-bf6a9fa4d10c] [ 2245.570672] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Traceback (most recent call last): [ 2245.570672] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 2245.570672] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] self._delete_instance(context, instance, bdms) [ 2245.570672] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] File "/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 2245.570672] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] self._shutdown_instance(context, instance, bdms) [ 2245.570672] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 2245.570672] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] self._try_deallocate_network(context, instance, requested_networks) [ 2245.570672] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 2245.570672] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] with excutils.save_and_reraise_exception(): [ 2245.570672] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2245.570672] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] self.force_reraise() [ 2245.570969] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2245.570969] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] raise self.value [ 2245.570969] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 2245.570969] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] _deallocate_network_with_retries() [ 2245.570969] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 2245.570969] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] return evt.wait() [ 2245.570969] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2245.570969] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] result = hub.switch() [ 2245.570969] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2245.570969] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] return self.greenlet.switch() [ 2245.570969] env[68443]: ERROR 
nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2245.570969] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] result = func(*self.args, **self.kw) [ 2245.571325] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2245.571325] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] result = f(*args, **kwargs) [ 2245.571325] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 2245.571325] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] self._deallocate_network( [ 2245.571325] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 2245.571325] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] self.network_api.deallocate_for_instance( [ 2245.571325] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2245.571325] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] data = neutron.list_ports(**search_opts) [ 2245.571325] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2245.571325] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] ret = obj(*args, **kwargs) [ 2245.571325] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2245.571325] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] return self.list('ports', self.ports_path, retrieve_all, [ 2245.571325] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2245.571618] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] ret = obj(*args, **kwargs) [ 2245.571618] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2245.571618] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] for r in self._pagination(collection, path, **params): [ 2245.571618] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2245.571618] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] res = self.get(path, params=params) [ 2245.571618] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2245.571618] env[68443]: ERROR 
nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] ret = obj(*args, **kwargs) [ 2245.571618] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2245.571618] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] return self.retry_request("GET", action, body=body, [ 2245.571618] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2245.571618] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] ret = obj(*args, **kwargs) [ 2245.571618] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2245.571618] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] return self.do_request(method, action, body=body, [ 2245.571916] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2245.571916] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] ret = obj(*args, **kwargs) [ 2245.571916] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2245.571916] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] self._handle_fault_response(status_code, replybody, resp) [ 2245.571916] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2245.571916] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2245.571916] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 2245.571916] env[68443]: ERROR nova.compute.manager [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] [ 2245.577200] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50bfd60e-be32-4aef-8ddb-45dd97ff51f1 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.584546] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db91161b-7841-4da1-b9ac-1a21c5b82d9c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.613757] env[68443]: DEBUG nova.network.neutron [None req-d818ae68-d6a3-4cb6-b27f-51c2792df050 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2245.615283] env[68443]: DEBUG oslo_concurrency.lockutils [None req-9ed7767f-acef-4e90-8d86-1d492102e381 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Lock "062710e8-2ccb-4926-97ce-bf6a9fa4d10c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.429s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2245.616686] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae334d8b-f175-424c-bb67-09cb6b7c999c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.624092] env[68443]: DEBUG oslo_concurrency.lockutils [None req-d818ae68-d6a3-4cb6-b27f-51c2792df050 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Releasing lock "refresh_cache-963d7f65-a761-4ce1-b6d1-fc987c3111c0" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2245.624486] env[68443]: DEBUG nova.compute.manager [None req-d818ae68-d6a3-4cb6-b27f-51c2792df050 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Start destroying the instance on the hypervisor. 
{{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2245.624683] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-d818ae68-d6a3-4cb6-b27f-51c2792df050 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2245.627165] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd5517a1-0aa4-470f-b2eb-2324a541d5a3 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.631065] env[68443]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2f6c8aca-4c8e-4044-a278-17064bc2a147 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.642466] env[68443]: DEBUG nova.compute.provider_tree [None req-6d0dbd4c-415c-45d6-bb9c-694694a324e2 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2245.648985] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6adedb25-3126-43be-baea-482e4554d562 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.659995] env[68443]: DEBUG nova.scheduler.client.report [None req-6d0dbd4c-415c-45d6-bb9c-694694a324e2 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2245.669544] env[68443]: INFO nova.compute.manager [None req-9ed7767f-acef-4e90-8d86-1d492102e381 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 062710e8-2ccb-4926-97ce-bf6a9fa4d10c] Successfully reverted task state from None on failure for instance. [ 2245.681682] env[68443]: WARNING nova.virt.vmwareapi.vmops [None req-d818ae68-d6a3-4cb6-b27f-51c2792df050 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 963d7f65-a761-4ce1-b6d1-fc987c3111c0 could not be found. 
[ 2245.681875] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-d818ae68-d6a3-4cb6-b27f-51c2792df050 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2245.682063] env[68443]: INFO nova.compute.manager [None req-d818ae68-d6a3-4cb6-b27f-51c2792df050 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Took 0.06 seconds to destroy the instance on the hypervisor. [ 2245.682311] env[68443]: DEBUG oslo.service.loopingcall [None req-d818ae68-d6a3-4cb6-b27f-51c2792df050 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2245.683391] env[68443]: ERROR oslo_messaging.rpc.server [None req-9ed7767f-acef-4e90-8d86-1d492102e381 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2245.683391] env[68443]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 2245.683391] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2245.683391] env[68443]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2245.683391] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2245.683391] env[68443]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 2245.683391] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2245.683391] env[68443]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 2245.683391] env[68443]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2245.683391] env[68443]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-182aa2f6-a5e3-4ce2-b47d-11a8244813b9'] [ 2245.683391] env[68443]: ERROR oslo_messaging.rpc.server [ 2245.683391] env[68443]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 2245.683391] env[68443]: ERROR oslo_messaging.rpc.server [ 2245.683391] env[68443]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 2245.683391] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 2245.683391] env[68443]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 2245.683815] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 2245.683815] env[68443]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 
2245.683815] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 2245.683815] env[68443]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 2245.683815] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 2245.683815] env[68443]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2245.683815] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2245.683815] env[68443]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2245.683815] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2245.683815] env[68443]: ERROR oslo_messaging.rpc.server raise self.value [ 2245.683815] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 2245.683815] env[68443]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 2245.683815] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 2245.683815] env[68443]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2245.683815] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2245.683815] env[68443]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2245.683815] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2245.683815] env[68443]: ERROR oslo_messaging.rpc.server raise self.value [ 2245.684258] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 2245.684258] env[68443]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2245.684258] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 2245.684258] env[68443]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2245.684258] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 2245.684258] env[68443]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2245.684258] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2245.684258] env[68443]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2245.684258] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2245.684258] env[68443]: ERROR oslo_messaging.rpc.server raise self.value [ 2245.684258] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 2245.684258] env[68443]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2245.684258] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3344, in terminate_instance [ 2245.684258] 
env[68443]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 2245.684258] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 2245.684258] env[68443]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 2245.684258] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3339, in do_terminate_instance [ 2245.684258] env[68443]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2245.684637] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2245.684637] env[68443]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2245.684637] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2245.684637] env[68443]: ERROR oslo_messaging.rpc.server raise self.value [ 2245.684637] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 2245.684637] env[68443]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 2245.684637] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 2245.684637] env[68443]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 2245.684637] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 2245.684637] env[68443]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 2245.684637] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 2245.684637] env[68443]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2245.684637] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2245.684637] env[68443]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2245.684637] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2245.684637] env[68443]: ERROR oslo_messaging.rpc.server raise self.value [ 2245.684637] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 2245.684637] env[68443]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 2245.685022] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 2245.685022] env[68443]: ERROR oslo_messaging.rpc.server return evt.wait() [ 2245.685022] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2245.685022] env[68443]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 2245.685022] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2245.685022] env[68443]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 2245.685022] env[68443]: ERROR 
oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2245.685022] env[68443]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 2245.685022] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2245.685022] env[68443]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 2245.685022] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 2245.685022] env[68443]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 2245.685022] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 2245.685022] env[68443]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 2245.685022] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2245.685022] env[68443]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 2245.685022] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2245.685022] env[68443]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2245.685413] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2245.685413] env[68443]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 2245.685413] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2245.685413] env[68443]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2245.685413] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2245.685413] env[68443]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 2245.685413] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2245.685413] env[68443]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 2245.685413] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2245.685413] env[68443]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2245.685413] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2245.685413] env[68443]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 2245.685413] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2245.685413] env[68443]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2245.685413] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2245.685413] env[68443]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 2245.685413] env[68443]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2245.685413] env[68443]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2245.685837] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2245.685837] env[68443]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 2245.685837] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2245.685837] env[68443]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2245.685837] env[68443]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2245.685837] env[68443]: ERROR oslo_messaging.rpc.server [ 2245.685837] env[68443]: DEBUG oslo_concurrency.lockutils [None req-6d0dbd4c-415c-45d6-bb9c-694694a324e2 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.294s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2245.685837] env[68443]: DEBUG nova.compute.manager [None req-6d0dbd4c-415c-45d6-bb9c-694694a324e2 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: d19509cf-7828-4e55-bf2b-4c57b9eab217] Start building networks asynchronously for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2245.686734] env[68443]: DEBUG nova.compute.manager [-] [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2245.686820] env[68443]: DEBUG nova.network.neutron [-] [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2245.721476] env[68443]: DEBUG nova.compute.utils [None req-6d0dbd4c-415c-45d6-bb9c-694694a324e2 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Using /dev/sd instead of None {{(pid=68443) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2245.723148] env[68443]: DEBUG nova.compute.manager [None req-6d0dbd4c-415c-45d6-bb9c-694694a324e2 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: d19509cf-7828-4e55-bf2b-4c57b9eab217] Allocating IP information in the background. {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2245.723315] env[68443]: DEBUG nova.network.neutron [None req-6d0dbd4c-415c-45d6-bb9c-694694a324e2 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: d19509cf-7828-4e55-bf2b-4c57b9eab217] allocate_for_instance() {{(pid=68443) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2245.732315] env[68443]: DEBUG nova.compute.manager [None req-6d0dbd4c-415c-45d6-bb9c-694694a324e2 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: d19509cf-7828-4e55-bf2b-4c57b9eab217] Start building block device mappings for instance. 
{{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2245.790450] env[68443]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=68443) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 2245.790684] env[68443]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2245.791223] env[68443]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2245.791223] env[68443]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 2245.791223] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2245.791223] env[68443]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2245.791223] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2245.791223] env[68443]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 2245.791223] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2245.791223] env[68443]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 2245.791223] env[68443]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2245.791223] env[68443]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-f8d79307-f78f-4758-a9fb-e7e0bc20896a'] [ 2245.791223] env[68443]: ERROR oslo.service.loopingcall [ 2245.791223] env[68443]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 2245.791223] env[68443]: ERROR oslo.service.loopingcall [ 2245.791223] env[68443]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 2245.791223] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2245.791223] env[68443]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 2245.791677] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2245.791677] env[68443]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 2245.791677] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 2245.791677] env[68443]: ERROR oslo.service.loopingcall self._deallocate_network( [ 2245.791677] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 2245.791677] env[68443]: ERROR oslo.service.loopingcall 
self.network_api.deallocate_for_instance( [ 2245.791677] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2245.791677] env[68443]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 2245.791677] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2245.791677] env[68443]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2245.791677] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2245.791677] env[68443]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 2245.791677] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2245.791677] env[68443]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2245.791677] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2245.791677] env[68443]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 2245.791677] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2245.791677] env[68443]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 2245.792067] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2245.792067] env[68443]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2245.792067] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2245.792067] env[68443]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 2245.792067] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2245.792067] env[68443]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2245.792067] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2245.792067] env[68443]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 2245.792067] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2245.792067] env[68443]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2245.792067] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2245.792067] env[68443]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 2245.792067] env[68443]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2245.792067] env[68443]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2245.792067] env[68443]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 2245.792067] env[68443]: ERROR oslo.service.loopingcall [ 2245.792502] env[68443]: ERROR nova.compute.manager [None req-d818ae68-d6a3-4cb6-b27f-51c2792df050 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2245.816681] env[68443]: DEBUG nova.compute.manager [None req-6d0dbd4c-415c-45d6-bb9c-694694a324e2 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: d19509cf-7828-4e55-bf2b-4c57b9eab217] Start spawning the instance on the hypervisor. {{(pid=68443) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2245.820569] env[68443]: DEBUG nova.policy [None req-6d0dbd4c-415c-45d6-bb9c-694694a324e2 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5394eee9936641f986136eee619d6c2b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3d4692d4df3948b98eae443eebb5239b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68443) authorize /opt/stack/nova/nova/policy.py:203}} [ 2245.823985] env[68443]: ERROR nova.compute.manager [None req-d818ae68-d6a3-4cb6-b27f-51c2792df050 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 2245.823985] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Traceback (most recent call last): [ 2245.823985] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2245.823985] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] ret = obj(*args, **kwargs) [ 2245.823985] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2245.823985] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] exception_handler_v20(status_code, error_body) [ 2245.823985] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2245.823985] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] raise client_exc(message=error_message, [ 2245.823985] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2245.823985] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Neutron server returns request_ids: ['req-f8d79307-f78f-4758-a9fb-e7e0bc20896a'] [ 2245.823985] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] [ 2245.824345] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] During handling of the above exception, another exception occurred: [ 2245.824345] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] [ 2245.824345] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Traceback (most recent call last): [ 2245.824345] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 2245.824345] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] self._delete_instance(context, instance, bdms) [ 2245.824345] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 2245.824345] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] self._shutdown_instance(context, instance, bdms) [ 2245.824345] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 2245.824345] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] self._try_deallocate_network(context, instance, requested_networks) [ 2245.824345] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 2245.824345] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] with excutils.save_and_reraise_exception(): [ 2245.824345] env[68443]: ERROR 
nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2245.824345] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] self.force_reraise() [ 2245.824690] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2245.824690] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] raise self.value [ 2245.824690] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 2245.824690] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] _deallocate_network_with_retries() [ 2245.824690] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 2245.824690] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] return evt.wait() [ 2245.824690] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2245.824690] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] result = hub.switch() [ 2245.824690] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2245.824690] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] return self.greenlet.switch() [ 2245.824690] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2245.824690] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] result = func(*self.args, **self.kw) [ 2245.825012] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2245.825012] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] result = f(*args, **kwargs) [ 2245.825012] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 2245.825012] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] self._deallocate_network( [ 2245.825012] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 2245.825012] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] self.network_api.deallocate_for_instance( [ 2245.825012] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2245.825012] env[68443]: ERROR nova.compute.manager [instance: 
963d7f65-a761-4ce1-b6d1-fc987c3111c0] data = neutron.list_ports(**search_opts) [ 2245.825012] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2245.825012] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] ret = obj(*args, **kwargs) [ 2245.825012] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2245.825012] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] return self.list('ports', self.ports_path, retrieve_all, [ 2245.825012] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2245.825438] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] ret = obj(*args, **kwargs) [ 2245.825438] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2245.825438] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] for r in self._pagination(collection, path, **params): [ 2245.825438] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2245.825438] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] res = self.get(path, params=params) [ 2245.825438] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2245.825438] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] ret = obj(*args, **kwargs) [ 2245.825438] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2245.825438] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] return self.retry_request("GET", action, body=body, [ 2245.825438] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2245.825438] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] ret = obj(*args, **kwargs) [ 2245.825438] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2245.825438] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] return self.do_request(method, action, body=body, [ 2245.825782] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2245.825782] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] ret = obj(*args, **kwargs) [ 2245.825782] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2245.825782] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] self._handle_fault_response(status_code, replybody, resp) [ 2245.825782] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2245.825782] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2245.825782] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2245.825782] env[68443]: ERROR nova.compute.manager [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] [ 2245.843156] env[68443]: DEBUG nova.virt.hardware [None req-6d0dbd4c-415c-45d6-bb9c-694694a324e2 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-08T18:48:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-08T18:48:12Z,direct_url=,disk_format='vmdk',id=a80a8b97-4d56-4702-9b02-9d115fcd6710,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f4f2a1e220914ec3b281775c224df247',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-08T18:48:13Z,virtual_size=,visibility=), allow threads: False {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2245.843380] env[68443]: DEBUG nova.virt.hardware [None req-6d0dbd4c-415c-45d6-bb9c-694694a324e2 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Flavor limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2245.843542] env[68443]: DEBUG nova.virt.hardware [None req-6d0dbd4c-415c-45d6-bb9c-694694a324e2 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Image limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2245.843722] env[68443]: DEBUG nova.virt.hardware [None req-6d0dbd4c-415c-45d6-bb9c-694694a324e2 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Flavor pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2245.843869] env[68443]: DEBUG nova.virt.hardware [None req-6d0dbd4c-415c-45d6-bb9c-694694a324e2 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Image pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2245.844029] env[68443]: DEBUG nova.virt.hardware [None req-6d0dbd4c-415c-45d6-bb9c-694694a324e2 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2245.844242] env[68443]: DEBUG 
nova.virt.hardware [None req-6d0dbd4c-415c-45d6-bb9c-694694a324e2 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2245.844400] env[68443]: DEBUG nova.virt.hardware [None req-6d0dbd4c-415c-45d6-bb9c-694694a324e2 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2245.844562] env[68443]: DEBUG nova.virt.hardware [None req-6d0dbd4c-415c-45d6-bb9c-694694a324e2 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Got 1 possible topologies {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2245.844719] env[68443]: DEBUG nova.virt.hardware [None req-6d0dbd4c-415c-45d6-bb9c-694694a324e2 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2245.844886] env[68443]: DEBUG nova.virt.hardware [None req-6d0dbd4c-415c-45d6-bb9c-694694a324e2 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2245.845940] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a55124d-da21-4e0b-a8d4-542e2662e16d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.850771] env[68443]: DEBUG oslo_concurrency.lockutils [None req-d818ae68-d6a3-4cb6-b27f-51c2792df050 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Lock "963d7f65-a761-4ce1-b6d1-fc987c3111c0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.345s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2245.853741] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "963d7f65-a761-4ce1-b6d1-fc987c3111c0" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 351.771s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2245.853929] env[68443]: INFO nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 2245.854118] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "963d7f65-a761-4ce1-b6d1-fc987c3111c0" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2245.855935] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fa5dbc8-d127-45e4-a831-8b0c86379022 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.909051] env[68443]: INFO nova.compute.manager [None req-d818ae68-d6a3-4cb6-b27f-51c2792df050 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] [instance: 963d7f65-a761-4ce1-b6d1-fc987c3111c0] Successfully reverted task state from None on failure for instance. [ 2245.912437] env[68443]: ERROR oslo_messaging.rpc.server [None req-d818ae68-d6a3-4cb6-b27f-51c2792df050 tempest-ServerShowV247Test-182818576 tempest-ServerShowV247Test-182818576-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2245.912437] env[68443]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 2245.912437] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2245.912437] env[68443]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2245.912437] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2245.912437] env[68443]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 2245.912437] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2245.912437] env[68443]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 2245.912437] env[68443]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2245.912437] env[68443]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-f8d79307-f78f-4758-a9fb-e7e0bc20896a'] [ 2245.912437] env[68443]: ERROR oslo_messaging.rpc.server [ 2245.912437] env[68443]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 2245.912437] env[68443]: ERROR oslo_messaging.rpc.server [ 2245.912437] env[68443]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 2245.912437] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 2245.912437] env[68443]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 2245.912941] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 2245.912941] env[68443]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 2245.912941] env[68443]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 2245.912941] env[68443]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 2245.912941] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 2245.912941] env[68443]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2245.912941] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2245.912941] env[68443]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2245.912941] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2245.912941] env[68443]: ERROR oslo_messaging.rpc.server raise self.value [ 2245.912941] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 2245.912941] env[68443]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 2245.912941] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 2245.912941] env[68443]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2245.912941] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2245.912941] env[68443]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2245.912941] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2245.912941] env[68443]: ERROR oslo_messaging.rpc.server raise self.value [ 2245.913407] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 2245.913407] env[68443]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2245.913407] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 2245.913407] env[68443]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2245.913407] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 2245.913407] env[68443]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2245.913407] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2245.913407] env[68443]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2245.913407] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2245.913407] env[68443]: ERROR oslo_messaging.rpc.server raise self.value [ 2245.913407] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 2245.913407] env[68443]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2245.913407] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3344, in terminate_instance [ 2245.913407] env[68443]: ERROR oslo_messaging.rpc.server 
do_terminate_instance(instance, bdms) [ 2245.913407] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 2245.913407] env[68443]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 2245.913407] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3339, in do_terminate_instance [ 2245.913407] env[68443]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2245.913856] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2245.913856] env[68443]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2245.913856] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2245.913856] env[68443]: ERROR oslo_messaging.rpc.server raise self.value [ 2245.913856] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 2245.913856] env[68443]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 2245.913856] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 2245.913856] env[68443]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 2245.913856] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 2245.913856] env[68443]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 2245.913856] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 2245.913856] env[68443]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2245.913856] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2245.913856] env[68443]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2245.913856] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2245.913856] env[68443]: ERROR oslo_messaging.rpc.server raise self.value [ 2245.913856] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 2245.913856] env[68443]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 2245.914294] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 2245.914294] env[68443]: ERROR oslo_messaging.rpc.server return evt.wait() [ 2245.914294] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2245.914294] env[68443]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 2245.914294] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2245.914294] env[68443]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 2245.914294] env[68443]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2245.914294] env[68443]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 2245.914294] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2245.914294] env[68443]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 2245.914294] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 2245.914294] env[68443]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 2245.914294] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 2245.914294] env[68443]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 2245.914294] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2245.914294] env[68443]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 2245.914294] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2245.914294] env[68443]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2245.914753] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2245.914753] env[68443]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 2245.914753] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2245.914753] env[68443]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2245.914753] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2245.914753] env[68443]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 2245.914753] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2245.914753] env[68443]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 2245.914753] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2245.914753] env[68443]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2245.914753] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2245.914753] env[68443]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 2245.914753] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2245.914753] env[68443]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2245.914753] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2245.914753] env[68443]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 2245.914753] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 
2245.914753] env[68443]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2245.915221] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2245.915221] env[68443]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 2245.915221] env[68443]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2245.915221] env[68443]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2245.915221] env[68443]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2245.915221] env[68443]: ERROR oslo_messaging.rpc.server [ 2246.120318] env[68443]: DEBUG nova.network.neutron [None req-6d0dbd4c-415c-45d6-bb9c-694694a324e2 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: d19509cf-7828-4e55-bf2b-4c57b9eab217] Successfully created port: cbdc7132-c758-4e5f-a646-bc243ddebbbc {{(pid=68443) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2246.605772] env[68443]: DEBUG nova.compute.manager [req-048217a0-4961-4a4f-91e5-a47308427674 req-4479e820-f88c-4690-a814-f0d50f762b44 service nova] [instance: d19509cf-7828-4e55-bf2b-4c57b9eab217] Received event network-vif-plugged-cbdc7132-c758-4e5f-a646-bc243ddebbbc {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2246.605997] env[68443]: DEBUG oslo_concurrency.lockutils [req-048217a0-4961-4a4f-91e5-a47308427674 req-4479e820-f88c-4690-a814-f0d50f762b44 service nova] Acquiring lock "d19509cf-7828-4e55-bf2b-4c57b9eab217-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2246.606225] env[68443]: DEBUG oslo_concurrency.lockutils [req-048217a0-4961-4a4f-91e5-a47308427674 req-4479e820-f88c-4690-a814-f0d50f762b44 service nova] Lock "d19509cf-7828-4e55-bf2b-4c57b9eab217-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2246.606396] env[68443]: DEBUG oslo_concurrency.lockutils [req-048217a0-4961-4a4f-91e5-a47308427674 req-4479e820-f88c-4690-a814-f0d50f762b44 service nova] Lock "d19509cf-7828-4e55-bf2b-4c57b9eab217-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2246.606562] env[68443]: DEBUG nova.compute.manager [req-048217a0-4961-4a4f-91e5-a47308427674 req-4479e820-f88c-4690-a814-f0d50f762b44 service nova] [instance: d19509cf-7828-4e55-bf2b-4c57b9eab217] No waiting events found dispatching network-vif-plugged-cbdc7132-c758-4e5f-a646-bc243ddebbbc {{(pid=68443) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2246.606730] env[68443]: WARNING nova.compute.manager [req-048217a0-4961-4a4f-91e5-a47308427674 req-4479e820-f88c-4690-a814-f0d50f762b44 service nova] [instance: d19509cf-7828-4e55-bf2b-4c57b9eab217] Received unexpected event network-vif-plugged-cbdc7132-c758-4e5f-a646-bc243ddebbbc for instance with vm_state building and task_state spawning. 
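The traceback above ends in nova/network/neutron.py's client wrapper (the frames at neutron.py:196 and neutron.py:212), which converts the neutronclient 401 Unauthorized raised while listing ports during instance teardown into nova.exception.NeutronAdminCredentialConfigurationInvalid, i.e. it reports rejected service credentials as a deployment configuration problem rather than a user error. The following is a minimal, self-contained sketch of that translation pattern only; the class and function names below (translate_neutron_unauthorized, list_ports stub) are illustrative stand-ins, not Nova's actual code.

    import functools

    class Unauthorized(Exception):
        """Stand-in for neutronclient.common.exceptions.Unauthorized (HTTP 401)."""

    class NeutronAdminCredentialConfigurationInvalid(Exception):
        """Stand-in for the Nova exception seen in the log above."""
        message = "Networking client is experiencing an unauthorized exception."

    def translate_neutron_unauthorized(func):
        """Map a 401 from the Neutron client into a configuration-level error.

        Mirrors the shape of the wrapper frames in the traceback: the call is
        attempted, and an Unauthorized from the networking client is re-raised
        as a credentials/configuration error.
        """
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except Unauthorized:
                raise NeutronAdminCredentialConfigurationInvalid(
                    NeutronAdminCredentialConfigurationInvalid.message)
        return wrapper

    @translate_neutron_unauthorized
    def list_ports(device_id):
        # Illustrative stub: a real call would go through neutronclient's
        # Client.list_ports(); here we simulate the 401 recorded in the log.
        raise Unauthorized("The request you have made requires authentication.")

    if __name__ == "__main__":
        try:
            list_ports("963d7f65-a761-4ce1-b6d1-fc987c3111c0")
        except NeutronAdminCredentialConfigurationInvalid as exc:
            print(exc)

Under this pattern, the RPC server logs the wrapped exception (as above) and reverts the instance task state, while the underlying 401 and its Neutron request ID remain visible in the chained traceback for operators.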
[ 2246.693938] env[68443]: DEBUG nova.network.neutron [None req-6d0dbd4c-415c-45d6-bb9c-694694a324e2 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: d19509cf-7828-4e55-bf2b-4c57b9eab217] Successfully updated port: cbdc7132-c758-4e5f-a646-bc243ddebbbc {{(pid=68443) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2246.706914] env[68443]: DEBUG oslo_concurrency.lockutils [None req-6d0dbd4c-415c-45d6-bb9c-694694a324e2 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Acquiring lock "refresh_cache-d19509cf-7828-4e55-bf2b-4c57b9eab217" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2246.707074] env[68443]: DEBUG oslo_concurrency.lockutils [None req-6d0dbd4c-415c-45d6-bb9c-694694a324e2 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Acquired lock "refresh_cache-d19509cf-7828-4e55-bf2b-4c57b9eab217" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2246.707221] env[68443]: DEBUG nova.network.neutron [None req-6d0dbd4c-415c-45d6-bb9c-694694a324e2 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: d19509cf-7828-4e55-bf2b-4c57b9eab217] Building network info cache for instance {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2246.746532] env[68443]: DEBUG nova.network.neutron [None req-6d0dbd4c-415c-45d6-bb9c-694694a324e2 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: d19509cf-7828-4e55-bf2b-4c57b9eab217] Instance cache missing network info. {{(pid=68443) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2246.824206] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2246.913634] env[68443]: DEBUG nova.network.neutron [None req-6d0dbd4c-415c-45d6-bb9c-694694a324e2 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: d19509cf-7828-4e55-bf2b-4c57b9eab217] Updating instance_info_cache with network_info: [{"id": "cbdc7132-c758-4e5f-a646-bc243ddebbbc", "address": "fa:16:3e:d6:06:81", "network": {"id": "986011ca-8616-45ae-ad5c-074dc2c4637d", "bridge": "br-int", "label": "tempest-ServersTestJSON-393504262-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d4692d4df3948b98eae443eebb5239b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "095fbf26-7367-4f9e-87c5-2965b64b0b0f", "external-id": "nsx-vlan-transportzone-777", "segmentation_id": 777, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcbdc7132-c7", "ovs_interfaceid": "cbdc7132-c758-4e5f-a646-bc243ddebbbc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": 
true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2246.924830] env[68443]: DEBUG oslo_concurrency.lockutils [None req-6d0dbd4c-415c-45d6-bb9c-694694a324e2 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Releasing lock "refresh_cache-d19509cf-7828-4e55-bf2b-4c57b9eab217" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2246.925119] env[68443]: DEBUG nova.compute.manager [None req-6d0dbd4c-415c-45d6-bb9c-694694a324e2 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: d19509cf-7828-4e55-bf2b-4c57b9eab217] Instance network_info: |[{"id": "cbdc7132-c758-4e5f-a646-bc243ddebbbc", "address": "fa:16:3e:d6:06:81", "network": {"id": "986011ca-8616-45ae-ad5c-074dc2c4637d", "bridge": "br-int", "label": "tempest-ServersTestJSON-393504262-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d4692d4df3948b98eae443eebb5239b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "095fbf26-7367-4f9e-87c5-2965b64b0b0f", "external-id": "nsx-vlan-transportzone-777", "segmentation_id": 777, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcbdc7132-c7", "ovs_interfaceid": "cbdc7132-c758-4e5f-a646-bc243ddebbbc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 2246.925511] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-6d0dbd4c-415c-45d6-bb9c-694694a324e2 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: d19509cf-7828-4e55-bf2b-4c57b9eab217] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d6:06:81', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '095fbf26-7367-4f9e-87c5-2965b64b0b0f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cbdc7132-c758-4e5f-a646-bc243ddebbbc', 'vif_model': 'vmxnet3'}] {{(pid=68443) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2246.933474] env[68443]: DEBUG oslo.service.loopingcall [None req-6d0dbd4c-415c-45d6-bb9c-694694a324e2 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2246.933980] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d19509cf-7828-4e55-bf2b-4c57b9eab217] Creating VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2246.934343] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b6abeb64-5767-4d55-aa5f-31262e752b73 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2246.955563] env[68443]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2246.955563] env[68443]: value = "task-3374092" [ 2246.955563] env[68443]: _type = "Task" [ 2246.955563] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2246.965637] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3374092, 'name': CreateVM_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2247.467486] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3374092, 'name': CreateVM_Task, 'duration_secs': 0.283008} completed successfully. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2247.467645] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d19509cf-7828-4e55-bf2b-4c57b9eab217] Created VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2247.468299] env[68443]: DEBUG oslo_concurrency.lockutils [None req-6d0dbd4c-415c-45d6-bb9c-694694a324e2 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2247.468465] env[68443]: DEBUG oslo_concurrency.lockutils [None req-6d0dbd4c-415c-45d6-bb9c-694694a324e2 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2247.468767] env[68443]: DEBUG oslo_concurrency.lockutils [None req-6d0dbd4c-415c-45d6-bb9c-694694a324e2 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2247.469015] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3c56aa5e-9cf6-40bf-8467-510cd29be1ea {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2247.473299] env[68443]: DEBUG oslo_vmware.api [None req-6d0dbd4c-415c-45d6-bb9c-694694a324e2 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Waiting for the task: (returnval){ [ 2247.473299] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]52ec7aed-ff74-1b96-a102-4c8417d129cd" [ 2247.473299] env[68443]: _type = "Task" [ 2247.473299] env[68443]: } to complete. 
{{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2247.480352] env[68443]: DEBUG oslo_vmware.api [None req-6d0dbd4c-415c-45d6-bb9c-694694a324e2 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]52ec7aed-ff74-1b96-a102-4c8417d129cd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2247.824845] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2247.984131] env[68443]: DEBUG oslo_concurrency.lockutils [None req-6d0dbd4c-415c-45d6-bb9c-694694a324e2 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2247.984395] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-6d0dbd4c-415c-45d6-bb9c-694694a324e2 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: d19509cf-7828-4e55-bf2b-4c57b9eab217] Processing image a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2247.984607] env[68443]: DEBUG oslo_concurrency.lockutils [None req-6d0dbd4c-415c-45d6-bb9c-694694a324e2 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2248.633090] env[68443]: DEBUG nova.compute.manager [req-4833ed72-a129-4df0-b731-2d13ae2e7b37 req-061adc2e-ff73-49e7-bae7-c007214141ff service nova] [instance: d19509cf-7828-4e55-bf2b-4c57b9eab217] Received event network-changed-cbdc7132-c758-4e5f-a646-bc243ddebbbc {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2248.633316] env[68443]: DEBUG nova.compute.manager [req-4833ed72-a129-4df0-b731-2d13ae2e7b37 req-061adc2e-ff73-49e7-bae7-c007214141ff service nova] [instance: d19509cf-7828-4e55-bf2b-4c57b9eab217] Refreshing instance network info cache due to event network-changed-cbdc7132-c758-4e5f-a646-bc243ddebbbc. 
{{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 2248.633574] env[68443]: DEBUG oslo_concurrency.lockutils [req-4833ed72-a129-4df0-b731-2d13ae2e7b37 req-061adc2e-ff73-49e7-bae7-c007214141ff service nova] Acquiring lock "refresh_cache-d19509cf-7828-4e55-bf2b-4c57b9eab217" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2248.633732] env[68443]: DEBUG oslo_concurrency.lockutils [req-4833ed72-a129-4df0-b731-2d13ae2e7b37 req-061adc2e-ff73-49e7-bae7-c007214141ff service nova] Acquired lock "refresh_cache-d19509cf-7828-4e55-bf2b-4c57b9eab217" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2248.633903] env[68443]: DEBUG nova.network.neutron [req-4833ed72-a129-4df0-b731-2d13ae2e7b37 req-061adc2e-ff73-49e7-bae7-c007214141ff service nova] [instance: d19509cf-7828-4e55-bf2b-4c57b9eab217] Refreshing network info cache for port cbdc7132-c758-4e5f-a646-bc243ddebbbc {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2248.879683] env[68443]: DEBUG nova.network.neutron [req-4833ed72-a129-4df0-b731-2d13ae2e7b37 req-061adc2e-ff73-49e7-bae7-c007214141ff service nova] [instance: d19509cf-7828-4e55-bf2b-4c57b9eab217] Updated VIF entry in instance network info cache for port cbdc7132-c758-4e5f-a646-bc243ddebbbc. {{(pid=68443) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2248.880100] env[68443]: DEBUG nova.network.neutron [req-4833ed72-a129-4df0-b731-2d13ae2e7b37 req-061adc2e-ff73-49e7-bae7-c007214141ff service nova] [instance: d19509cf-7828-4e55-bf2b-4c57b9eab217] Updating instance_info_cache with network_info: [{"id": "cbdc7132-c758-4e5f-a646-bc243ddebbbc", "address": "fa:16:3e:d6:06:81", "network": {"id": "986011ca-8616-45ae-ad5c-074dc2c4637d", "bridge": "br-int", "label": "tempest-ServersTestJSON-393504262-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d4692d4df3948b98eae443eebb5239b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "095fbf26-7367-4f9e-87c5-2965b64b0b0f", "external-id": "nsx-vlan-transportzone-777", "segmentation_id": 777, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcbdc7132-c7", "ovs_interfaceid": "cbdc7132-c758-4e5f-a646-bc243ddebbbc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2248.890957] env[68443]: DEBUG oslo_concurrency.lockutils [req-4833ed72-a129-4df0-b731-2d13ae2e7b37 req-061adc2e-ff73-49e7-bae7-c007214141ff service nova] Releasing lock "refresh_cache-d19509cf-7828-4e55-bf2b-4c57b9eab217" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2253.820963] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68443) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2290.628812] env[68443]: DEBUG oslo_concurrency.lockutils [None req-41195292-c223-47db-9947-4197e6044dfe tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Acquiring lock "f3c8ee5f-a97a-46a4-82e4-ec1c55ff1114" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2290.629146] env[68443]: DEBUG oslo_concurrency.lockutils [None req-41195292-c223-47db-9947-4197e6044dfe tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Lock "f3c8ee5f-a97a-46a4-82e4-ec1c55ff1114" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2290.639513] env[68443]: DEBUG nova.compute.manager [None req-41195292-c223-47db-9947-4197e6044dfe tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: f3c8ee5f-a97a-46a4-82e4-ec1c55ff1114] Starting instance... {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2290.688259] env[68443]: DEBUG oslo_concurrency.lockutils [None req-41195292-c223-47db-9947-4197e6044dfe tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2290.688507] env[68443]: DEBUG oslo_concurrency.lockutils [None req-41195292-c223-47db-9947-4197e6044dfe tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2290.689919] env[68443]: INFO nova.compute.claims [None req-41195292-c223-47db-9947-4197e6044dfe tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: f3c8ee5f-a97a-46a4-82e4-ec1c55ff1114] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2290.835259] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-490e227a-3e97-420c-bc77-96f45e1482e7 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2290.842901] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cb5b49b-6d0a-4321-a4ea-b5f79bc824e7 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2290.872296] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beaec73d-16f3-441b-83cf-d0c1d79d8361 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2290.879646] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c108273-09ef-43d0-bc6d-ca5c336fd117 {{(pid=68443) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2290.893450] env[68443]: DEBUG nova.compute.provider_tree [None req-41195292-c223-47db-9947-4197e6044dfe tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2290.903941] env[68443]: DEBUG nova.scheduler.client.report [None req-41195292-c223-47db-9947-4197e6044dfe tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2290.917323] env[68443]: DEBUG oslo_concurrency.lockutils [None req-41195292-c223-47db-9947-4197e6044dfe tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.229s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2290.917805] env[68443]: DEBUG nova.compute.manager [None req-41195292-c223-47db-9947-4197e6044dfe tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: f3c8ee5f-a97a-46a4-82e4-ec1c55ff1114] Start building networks asynchronously for instance. {{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2290.949191] env[68443]: DEBUG nova.compute.utils [None req-41195292-c223-47db-9947-4197e6044dfe tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Using /dev/sd instead of None {{(pid=68443) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2290.951285] env[68443]: DEBUG nova.compute.manager [None req-41195292-c223-47db-9947-4197e6044dfe tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: f3c8ee5f-a97a-46a4-82e4-ec1c55ff1114] Allocating IP information in the background. {{(pid=68443) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2290.951464] env[68443]: DEBUG nova.network.neutron [None req-41195292-c223-47db-9947-4197e6044dfe tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: f3c8ee5f-a97a-46a4-82e4-ec1c55ff1114] allocate_for_instance() {{(pid=68443) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2290.959580] env[68443]: DEBUG nova.compute.manager [None req-41195292-c223-47db-9947-4197e6044dfe tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: f3c8ee5f-a97a-46a4-82e4-ec1c55ff1114] Start building block device mappings for instance. 
{{(pid=68443) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2291.016766] env[68443]: DEBUG nova.policy [None req-41195292-c223-47db-9947-4197e6044dfe tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bd97a934ab8f48e2bf883cc4dddcdde1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3dafe4b3f7d243caa51d39bfc74a4c11', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68443) authorize /opt/stack/nova/nova/policy.py:203}} [ 2291.025058] env[68443]: DEBUG nova.compute.manager [None req-41195292-c223-47db-9947-4197e6044dfe tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: f3c8ee5f-a97a-46a4-82e4-ec1c55ff1114] Start spawning the instance on the hypervisor. {{(pid=68443) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2291.051390] env[68443]: DEBUG nova.virt.hardware [None req-41195292-c223-47db-9947-4197e6044dfe tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-08T18:48:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-08T18:48:12Z,direct_url=,disk_format='vmdk',id=a80a8b97-4d56-4702-9b02-9d115fcd6710,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f4f2a1e220914ec3b281775c224df247',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-08T18:48:13Z,virtual_size=,visibility=), allow threads: False {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2291.051714] env[68443]: DEBUG nova.virt.hardware [None req-41195292-c223-47db-9947-4197e6044dfe tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Flavor limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2291.052141] env[68443]: DEBUG nova.virt.hardware [None req-41195292-c223-47db-9947-4197e6044dfe tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Image limits 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2291.052141] env[68443]: DEBUG nova.virt.hardware [None req-41195292-c223-47db-9947-4197e6044dfe tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Flavor pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2291.052342] env[68443]: DEBUG nova.virt.hardware [None req-41195292-c223-47db-9947-4197e6044dfe tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Image pref 0:0:0 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2291.052529] env[68443]: DEBUG nova.virt.hardware [None 
req-41195292-c223-47db-9947-4197e6044dfe tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68443) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2291.052780] env[68443]: DEBUG nova.virt.hardware [None req-41195292-c223-47db-9947-4197e6044dfe tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2291.052988] env[68443]: DEBUG nova.virt.hardware [None req-41195292-c223-47db-9947-4197e6044dfe tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2291.053358] env[68443]: DEBUG nova.virt.hardware [None req-41195292-c223-47db-9947-4197e6044dfe tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Got 1 possible topologies {{(pid=68443) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2291.053450] env[68443]: DEBUG nova.virt.hardware [None req-41195292-c223-47db-9947-4197e6044dfe tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2291.053678] env[68443]: DEBUG nova.virt.hardware [None req-41195292-c223-47db-9947-4197e6044dfe tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68443) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2291.054557] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81a28713-304e-4726-af76-2b233560b823 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2291.063041] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62fd9cef-fe9d-4fbb-8c42-d951b7bbd377 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2291.350761] env[68443]: DEBUG nova.network.neutron [None req-41195292-c223-47db-9947-4197e6044dfe tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: f3c8ee5f-a97a-46a4-82e4-ec1c55ff1114] Successfully created port: 1c370d5e-efb0-4b5d-93a7-3f84cfa5eb18 {{(pid=68443) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2292.072090] env[68443]: DEBUG nova.compute.manager [req-0accfa2f-39b6-4547-a87e-6f2713b56899 req-b8fd26f0-b0d8-4574-9937-e3eddcec0104 service nova] [instance: f3c8ee5f-a97a-46a4-82e4-ec1c55ff1114] Received event network-vif-plugged-1c370d5e-efb0-4b5d-93a7-3f84cfa5eb18 {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2292.072370] env[68443]: DEBUG oslo_concurrency.lockutils [req-0accfa2f-39b6-4547-a87e-6f2713b56899 req-b8fd26f0-b0d8-4574-9937-e3eddcec0104 service nova] Acquiring lock 
"f3c8ee5f-a97a-46a4-82e4-ec1c55ff1114-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2292.072539] env[68443]: DEBUG oslo_concurrency.lockutils [req-0accfa2f-39b6-4547-a87e-6f2713b56899 req-b8fd26f0-b0d8-4574-9937-e3eddcec0104 service nova] Lock "f3c8ee5f-a97a-46a4-82e4-ec1c55ff1114-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2292.072762] env[68443]: DEBUG oslo_concurrency.lockutils [req-0accfa2f-39b6-4547-a87e-6f2713b56899 req-b8fd26f0-b0d8-4574-9937-e3eddcec0104 service nova] Lock "f3c8ee5f-a97a-46a4-82e4-ec1c55ff1114-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2292.072980] env[68443]: DEBUG nova.compute.manager [req-0accfa2f-39b6-4547-a87e-6f2713b56899 req-b8fd26f0-b0d8-4574-9937-e3eddcec0104 service nova] [instance: f3c8ee5f-a97a-46a4-82e4-ec1c55ff1114] No waiting events found dispatching network-vif-plugged-1c370d5e-efb0-4b5d-93a7-3f84cfa5eb18 {{(pid=68443) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2292.073203] env[68443]: WARNING nova.compute.manager [req-0accfa2f-39b6-4547-a87e-6f2713b56899 req-b8fd26f0-b0d8-4574-9937-e3eddcec0104 service nova] [instance: f3c8ee5f-a97a-46a4-82e4-ec1c55ff1114] Received unexpected event network-vif-plugged-1c370d5e-efb0-4b5d-93a7-3f84cfa5eb18 for instance with vm_state building and task_state spawning. [ 2292.153236] env[68443]: DEBUG nova.network.neutron [None req-41195292-c223-47db-9947-4197e6044dfe tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: f3c8ee5f-a97a-46a4-82e4-ec1c55ff1114] Successfully updated port: 1c370d5e-efb0-4b5d-93a7-3f84cfa5eb18 {{(pid=68443) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2292.169835] env[68443]: DEBUG oslo_concurrency.lockutils [None req-41195292-c223-47db-9947-4197e6044dfe tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Acquiring lock "refresh_cache-f3c8ee5f-a97a-46a4-82e4-ec1c55ff1114" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2292.170052] env[68443]: DEBUG oslo_concurrency.lockutils [None req-41195292-c223-47db-9947-4197e6044dfe tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Acquired lock "refresh_cache-f3c8ee5f-a97a-46a4-82e4-ec1c55ff1114" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2292.170255] env[68443]: DEBUG nova.network.neutron [None req-41195292-c223-47db-9947-4197e6044dfe tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: f3c8ee5f-a97a-46a4-82e4-ec1c55ff1114] Building network info cache for instance {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2292.212145] env[68443]: DEBUG nova.network.neutron [None req-41195292-c223-47db-9947-4197e6044dfe tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: f3c8ee5f-a97a-46a4-82e4-ec1c55ff1114] Instance cache missing network info. 
{{(pid=68443) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2292.393032] env[68443]: DEBUG nova.network.neutron [None req-41195292-c223-47db-9947-4197e6044dfe tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: f3c8ee5f-a97a-46a4-82e4-ec1c55ff1114] Updating instance_info_cache with network_info: [{"id": "1c370d5e-efb0-4b5d-93a7-3f84cfa5eb18", "address": "fa:16:3e:31:96:47", "network": {"id": "03d4bfbc-f6ba-4fd1-8bf5-dde287b9760f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1378866790-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3dafe4b3f7d243caa51d39bfc74a4c11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "47ca1ce6-8148-48d5-bcfe-89e39b73914e", "external-id": "nsx-vlan-transportzone-259", "segmentation_id": 259, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c370d5e-ef", "ovs_interfaceid": "1c370d5e-efb0-4b5d-93a7-3f84cfa5eb18", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2292.406574] env[68443]: DEBUG oslo_concurrency.lockutils [None req-41195292-c223-47db-9947-4197e6044dfe tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Releasing lock "refresh_cache-f3c8ee5f-a97a-46a4-82e4-ec1c55ff1114" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2292.406978] env[68443]: DEBUG nova.compute.manager [None req-41195292-c223-47db-9947-4197e6044dfe tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: f3c8ee5f-a97a-46a4-82e4-ec1c55ff1114] Instance network_info: |[{"id": "1c370d5e-efb0-4b5d-93a7-3f84cfa5eb18", "address": "fa:16:3e:31:96:47", "network": {"id": "03d4bfbc-f6ba-4fd1-8bf5-dde287b9760f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1378866790-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3dafe4b3f7d243caa51d39bfc74a4c11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "47ca1ce6-8148-48d5-bcfe-89e39b73914e", "external-id": "nsx-vlan-transportzone-259", "segmentation_id": 259, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c370d5e-ef", "ovs_interfaceid": "1c370d5e-efb0-4b5d-93a7-3f84cfa5eb18", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68443) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1987}} [ 2292.407673] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-41195292-c223-47db-9947-4197e6044dfe tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: f3c8ee5f-a97a-46a4-82e4-ec1c55ff1114] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:31:96:47', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '47ca1ce6-8148-48d5-bcfe-89e39b73914e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1c370d5e-efb0-4b5d-93a7-3f84cfa5eb18', 'vif_model': 'vmxnet3'}] {{(pid=68443) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2292.415558] env[68443]: DEBUG oslo.service.loopingcall [None req-41195292-c223-47db-9947-4197e6044dfe tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2292.416042] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f3c8ee5f-a97a-46a4-82e4-ec1c55ff1114] Creating VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2292.416295] env[68443]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-38435cc5-5a9d-43a3-b78d-078f7a64d7aa {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2292.438626] env[68443]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2292.438626] env[68443]: value = "task-3374093" [ 2292.438626] env[68443]: _type = "Task" [ 2292.438626] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2292.447164] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3374093, 'name': CreateVM_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2292.949322] env[68443]: DEBUG oslo_vmware.api [-] Task: {'id': task-3374093, 'name': CreateVM_Task, 'duration_secs': 0.301117} completed successfully. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2292.949487] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f3c8ee5f-a97a-46a4-82e4-ec1c55ff1114] Created VM on the ESX host {{(pid=68443) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2292.950146] env[68443]: DEBUG oslo_concurrency.lockutils [None req-41195292-c223-47db-9947-4197e6044dfe tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2292.950345] env[68443]: DEBUG oslo_concurrency.lockutils [None req-41195292-c223-47db-9947-4197e6044dfe tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2292.950615] env[68443]: DEBUG oslo_concurrency.lockutils [None req-41195292-c223-47db-9947-4197e6044dfe tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2292.950905] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4cd7bed1-aeb2-4bd6-8556-a86e313fefef {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2292.955585] env[68443]: DEBUG oslo_vmware.api [None req-41195292-c223-47db-9947-4197e6044dfe tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Waiting for the task: (returnval){ [ 2292.955585] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]5282fe46-9a0c-562e-6d7f-d02aa61b9006" [ 2292.955585] env[68443]: _type = "Task" [ 2292.955585] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2292.964709] env[68443]: DEBUG oslo_vmware.api [None req-41195292-c223-47db-9947-4197e6044dfe tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]5282fe46-9a0c-562e-6d7f-d02aa61b9006, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2293.466473] env[68443]: DEBUG oslo_concurrency.lockutils [None req-41195292-c223-47db-9947-4197e6044dfe tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2293.466733] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-41195292-c223-47db-9947-4197e6044dfe tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: f3c8ee5f-a97a-46a4-82e4-ec1c55ff1114] Processing image a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2293.466932] env[68443]: DEBUG oslo_concurrency.lockutils [None req-41195292-c223-47db-9947-4197e6044dfe tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2294.097417] env[68443]: DEBUG nova.compute.manager [req-7afba02e-c067-49c5-82fe-7d49fba4238d req-ff8009e5-4064-43f8-99fa-6aeeb53cc63f service nova] [instance: f3c8ee5f-a97a-46a4-82e4-ec1c55ff1114] Received event network-changed-1c370d5e-efb0-4b5d-93a7-3f84cfa5eb18 {{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2294.097618] env[68443]: DEBUG nova.compute.manager [req-7afba02e-c067-49c5-82fe-7d49fba4238d req-ff8009e5-4064-43f8-99fa-6aeeb53cc63f service nova] [instance: f3c8ee5f-a97a-46a4-82e4-ec1c55ff1114] Refreshing instance network info cache due to event network-changed-1c370d5e-efb0-4b5d-93a7-3f84cfa5eb18. 
{{(pid=68443) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 2294.097827] env[68443]: DEBUG oslo_concurrency.lockutils [req-7afba02e-c067-49c5-82fe-7d49fba4238d req-ff8009e5-4064-43f8-99fa-6aeeb53cc63f service nova] Acquiring lock "refresh_cache-f3c8ee5f-a97a-46a4-82e4-ec1c55ff1114" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2294.098045] env[68443]: DEBUG oslo_concurrency.lockutils [req-7afba02e-c067-49c5-82fe-7d49fba4238d req-ff8009e5-4064-43f8-99fa-6aeeb53cc63f service nova] Acquired lock "refresh_cache-f3c8ee5f-a97a-46a4-82e4-ec1c55ff1114" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2294.098264] env[68443]: DEBUG nova.network.neutron [req-7afba02e-c067-49c5-82fe-7d49fba4238d req-ff8009e5-4064-43f8-99fa-6aeeb53cc63f service nova] [instance: f3c8ee5f-a97a-46a4-82e4-ec1c55ff1114] Refreshing network info cache for port 1c370d5e-efb0-4b5d-93a7-3f84cfa5eb18 {{(pid=68443) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2294.186699] env[68443]: WARNING oslo_vmware.rw_handles [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2294.186699] env[68443]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2294.186699] env[68443]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2294.186699] env[68443]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2294.186699] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2294.186699] env[68443]: ERROR oslo_vmware.rw_handles response.begin() [ 2294.186699] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2294.186699] env[68443]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2294.186699] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2294.186699] env[68443]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2294.186699] env[68443]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2294.186699] env[68443]: ERROR oslo_vmware.rw_handles [ 2294.187130] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Downloaded image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to vmware_temp/f9a17836-64ed-4685-89db-324ffb59f2ef/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2294.189009] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Caching image {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2294.189275] env[68443]: DEBUG 
nova.virt.vmwareapi.vm_util [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Copying Virtual Disk [datastore1] vmware_temp/f9a17836-64ed-4685-89db-324ffb59f2ef/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk to [datastore1] vmware_temp/f9a17836-64ed-4685-89db-324ffb59f2ef/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk {{(pid=68443) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2294.189530] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-73002c4f-943b-4adb-ae4e-ba02c65e51ad {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2294.197876] env[68443]: DEBUG oslo_vmware.api [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Waiting for the task: (returnval){ [ 2294.197876] env[68443]: value = "task-3374094" [ 2294.197876] env[68443]: _type = "Task" [ 2294.197876] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2294.205523] env[68443]: DEBUG oslo_vmware.api [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Task: {'id': task-3374094, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2294.358949] env[68443]: DEBUG nova.network.neutron [req-7afba02e-c067-49c5-82fe-7d49fba4238d req-ff8009e5-4064-43f8-99fa-6aeeb53cc63f service nova] [instance: f3c8ee5f-a97a-46a4-82e4-ec1c55ff1114] Updated VIF entry in instance network info cache for port 1c370d5e-efb0-4b5d-93a7-3f84cfa5eb18. 
{{(pid=68443) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2294.359327] env[68443]: DEBUG nova.network.neutron [req-7afba02e-c067-49c5-82fe-7d49fba4238d req-ff8009e5-4064-43f8-99fa-6aeeb53cc63f service nova] [instance: f3c8ee5f-a97a-46a4-82e4-ec1c55ff1114] Updating instance_info_cache with network_info: [{"id": "1c370d5e-efb0-4b5d-93a7-3f84cfa5eb18", "address": "fa:16:3e:31:96:47", "network": {"id": "03d4bfbc-f6ba-4fd1-8bf5-dde287b9760f", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1378866790-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3dafe4b3f7d243caa51d39bfc74a4c11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "47ca1ce6-8148-48d5-bcfe-89e39b73914e", "external-id": "nsx-vlan-transportzone-259", "segmentation_id": 259, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c370d5e-ef", "ovs_interfaceid": "1c370d5e-efb0-4b5d-93a7-3f84cfa5eb18", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2294.369853] env[68443]: DEBUG oslo_concurrency.lockutils [req-7afba02e-c067-49c5-82fe-7d49fba4238d req-ff8009e5-4064-43f8-99fa-6aeeb53cc63f service nova] Releasing lock "refresh_cache-f3c8ee5f-a97a-46a4-82e4-ec1c55ff1114" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2294.707846] env[68443]: DEBUG oslo_vmware.exceptions [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Fault InvalidArgument not matched. 
{{(pid=68443) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2294.708215] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2294.708654] env[68443]: ERROR nova.compute.manager [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2294.708654] env[68443]: Faults: ['InvalidArgument'] [ 2294.708654] env[68443]: ERROR nova.compute.manager [instance: 12b39079-051e-4997-9fa1-7e467af04306] Traceback (most recent call last): [ 2294.708654] env[68443]: ERROR nova.compute.manager [instance: 12b39079-051e-4997-9fa1-7e467af04306] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2294.708654] env[68443]: ERROR nova.compute.manager [instance: 12b39079-051e-4997-9fa1-7e467af04306] yield resources [ 2294.708654] env[68443]: ERROR nova.compute.manager [instance: 12b39079-051e-4997-9fa1-7e467af04306] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2294.708654] env[68443]: ERROR nova.compute.manager [instance: 12b39079-051e-4997-9fa1-7e467af04306] self.driver.spawn(context, instance, image_meta, [ 2294.708654] env[68443]: ERROR nova.compute.manager [instance: 12b39079-051e-4997-9fa1-7e467af04306] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2294.708654] env[68443]: ERROR nova.compute.manager [instance: 12b39079-051e-4997-9fa1-7e467af04306] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2294.708654] env[68443]: ERROR nova.compute.manager [instance: 12b39079-051e-4997-9fa1-7e467af04306] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2294.708654] env[68443]: ERROR nova.compute.manager [instance: 12b39079-051e-4997-9fa1-7e467af04306] self._fetch_image_if_missing(context, vi) [ 2294.708654] env[68443]: ERROR nova.compute.manager [instance: 12b39079-051e-4997-9fa1-7e467af04306] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2294.709120] env[68443]: ERROR nova.compute.manager [instance: 12b39079-051e-4997-9fa1-7e467af04306] image_cache(vi, tmp_image_ds_loc) [ 2294.709120] env[68443]: ERROR nova.compute.manager [instance: 12b39079-051e-4997-9fa1-7e467af04306] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2294.709120] env[68443]: ERROR nova.compute.manager [instance: 12b39079-051e-4997-9fa1-7e467af04306] vm_util.copy_virtual_disk( [ 2294.709120] env[68443]: ERROR nova.compute.manager [instance: 12b39079-051e-4997-9fa1-7e467af04306] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2294.709120] env[68443]: ERROR nova.compute.manager [instance: 12b39079-051e-4997-9fa1-7e467af04306] session._wait_for_task(vmdk_copy_task) [ 2294.709120] env[68443]: ERROR nova.compute.manager [instance: 12b39079-051e-4997-9fa1-7e467af04306] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2294.709120] env[68443]: ERROR nova.compute.manager [instance: 12b39079-051e-4997-9fa1-7e467af04306] return self.wait_for_task(task_ref) [ 2294.709120] env[68443]: ERROR nova.compute.manager [instance: 12b39079-051e-4997-9fa1-7e467af04306] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2294.709120] env[68443]: ERROR nova.compute.manager [instance: 12b39079-051e-4997-9fa1-7e467af04306] return evt.wait() [ 2294.709120] env[68443]: ERROR nova.compute.manager [instance: 12b39079-051e-4997-9fa1-7e467af04306] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2294.709120] env[68443]: ERROR nova.compute.manager [instance: 12b39079-051e-4997-9fa1-7e467af04306] result = hub.switch() [ 2294.709120] env[68443]: ERROR nova.compute.manager [instance: 12b39079-051e-4997-9fa1-7e467af04306] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2294.709120] env[68443]: ERROR nova.compute.manager [instance: 12b39079-051e-4997-9fa1-7e467af04306] return self.greenlet.switch() [ 2294.709506] env[68443]: ERROR nova.compute.manager [instance: 12b39079-051e-4997-9fa1-7e467af04306] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2294.709506] env[68443]: ERROR nova.compute.manager [instance: 12b39079-051e-4997-9fa1-7e467af04306] self.f(*self.args, **self.kw) [ 2294.709506] env[68443]: ERROR nova.compute.manager [instance: 12b39079-051e-4997-9fa1-7e467af04306] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2294.709506] env[68443]: ERROR nova.compute.manager [instance: 12b39079-051e-4997-9fa1-7e467af04306] raise exceptions.translate_fault(task_info.error) [ 2294.709506] env[68443]: ERROR nova.compute.manager [instance: 12b39079-051e-4997-9fa1-7e467af04306] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2294.709506] env[68443]: ERROR nova.compute.manager [instance: 12b39079-051e-4997-9fa1-7e467af04306] Faults: ['InvalidArgument'] [ 2294.709506] env[68443]: ERROR nova.compute.manager [instance: 12b39079-051e-4997-9fa1-7e467af04306] [ 2294.709506] env[68443]: INFO nova.compute.manager [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Terminating instance [ 2294.710465] env[68443]: DEBUG oslo_concurrency.lockutils [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2294.710694] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2294.710932] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-940dbb41-104b-42e8-a8ea-ff6bd616dcb6 
{{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2294.713093] env[68443]: DEBUG nova.compute.manager [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2294.713298] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2294.713985] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca6c12b7-40ba-4cf3-aded-22e1965c2cb3 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2294.720502] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Unregistering the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2294.720738] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dd70a3d5-7e4b-4ea4-951d-52e86232c38c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2294.722752] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2294.722926] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68443) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2294.723842] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7235dcf3-52d3-4873-a205-98beef0a1693 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2294.728091] env[68443]: DEBUG oslo_vmware.api [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Waiting for the task: (returnval){ [ 2294.728091] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]52425932-a05d-fc92-4941-bc02874c8464" [ 2294.728091] env[68443]: _type = "Task" [ 2294.728091] env[68443]: } to complete. 
{{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2294.736652] env[68443]: DEBUG oslo_vmware.api [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]52425932-a05d-fc92-4941-bc02874c8464, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2294.793194] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Unregistered the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2294.793412] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Deleting contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2294.793591] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Deleting the datastore file [datastore1] 12b39079-051e-4997-9fa1-7e467af04306 {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2294.793846] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-55797912-8865-4740-9da6-cd027c8ef246 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2294.799967] env[68443]: DEBUG oslo_vmware.api [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Waiting for the task: (returnval){ [ 2294.799967] env[68443]: value = "task-3374096" [ 2294.799967] env[68443]: _type = "Task" [ 2294.799967] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2294.807167] env[68443]: DEBUG oslo_vmware.api [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Task: {'id': task-3374096, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2295.238494] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6333b256-471f-485d-b099-21fa82349319] Preparing fetch location {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2295.238733] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Creating directory with path [datastore1] vmware_temp/579f02d6-0879-4eb4-8f4e-edfd2bc92dc6/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2295.238975] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-11cf1b03-4e08-455a-b123-3e65d2f1e20a {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2295.250803] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Created directory with path [datastore1] vmware_temp/579f02d6-0879-4eb4-8f4e-edfd2bc92dc6/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2295.251012] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6333b256-471f-485d-b099-21fa82349319] Fetch image to [datastore1] vmware_temp/579f02d6-0879-4eb4-8f4e-edfd2bc92dc6/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2295.251198] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6333b256-471f-485d-b099-21fa82349319] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to [datastore1] vmware_temp/579f02d6-0879-4eb4-8f4e-edfd2bc92dc6/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2295.251920] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0261967a-3cdf-406c-8a3d-f1c337f8625e {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2295.258515] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d8480a3-fcae-4863-b430-6a75d47839a7 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2295.267305] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f455342d-891c-4d67-9424-41d2ebd858bd {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2295.297786] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-bb2470ff-f045-4b65-9eae-be7e2c230727 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2295.305328] env[68443]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-3b6ddc24-b6eb-451f-b685-564917859521 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2295.309351] env[68443]: DEBUG oslo_vmware.api [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Task: {'id': task-3374096, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077457} completed successfully. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2295.309844] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Deleted the datastore file {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2295.310037] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Deleted contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2295.310210] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2295.310379] env[68443]: INFO nova.compute.manager [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2295.312441] env[68443]: DEBUG nova.compute.claims [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Aborting claim: {{(pid=68443) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2295.312610] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2295.312825] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2295.327613] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6333b256-471f-485d-b099-21fa82349319] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2295.379307] env[68443]: DEBUG oslo_vmware.rw_handles [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/579f02d6-0879-4eb4-8f4e-edfd2bc92dc6/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68443) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2295.439344] env[68443]: DEBUG oslo_vmware.rw_handles [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Completed reading data from the image iterator. {{(pid=68443) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2295.439537] env[68443]: DEBUG oslo_vmware.rw_handles [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/579f02d6-0879-4eb4-8f4e-edfd2bc92dc6/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68443) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2295.519682] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-628bd00b-0cc5-42e8-b80e-ea03633eab30 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2295.527683] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2ea4a73-c1f6-40d9-8c43-2abf9fdb1028 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2295.556835] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd578c7d-bfeb-4231-8e38-cf18979eb238 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2295.564083] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c2eb1be-01c5-4bae-85fe-34f257f0c432 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2295.576872] env[68443]: DEBUG nova.compute.provider_tree [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2295.588323] env[68443]: DEBUG nova.scheduler.client.report [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2295.601391] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.288s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2295.601917] env[68443]: ERROR nova.compute.manager [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2295.601917] env[68443]: Faults: ['InvalidArgument'] [ 2295.601917] env[68443]: ERROR nova.compute.manager [instance: 12b39079-051e-4997-9fa1-7e467af04306] Traceback (most recent call last): [ 2295.601917] env[68443]: ERROR nova.compute.manager [instance: 12b39079-051e-4997-9fa1-7e467af04306] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2295.601917] env[68443]: ERROR 
nova.compute.manager [instance: 12b39079-051e-4997-9fa1-7e467af04306] self.driver.spawn(context, instance, image_meta, [ 2295.601917] env[68443]: ERROR nova.compute.manager [instance: 12b39079-051e-4997-9fa1-7e467af04306] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2295.601917] env[68443]: ERROR nova.compute.manager [instance: 12b39079-051e-4997-9fa1-7e467af04306] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2295.601917] env[68443]: ERROR nova.compute.manager [instance: 12b39079-051e-4997-9fa1-7e467af04306] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2295.601917] env[68443]: ERROR nova.compute.manager [instance: 12b39079-051e-4997-9fa1-7e467af04306] self._fetch_image_if_missing(context, vi) [ 2295.601917] env[68443]: ERROR nova.compute.manager [instance: 12b39079-051e-4997-9fa1-7e467af04306] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2295.601917] env[68443]: ERROR nova.compute.manager [instance: 12b39079-051e-4997-9fa1-7e467af04306] image_cache(vi, tmp_image_ds_loc) [ 2295.601917] env[68443]: ERROR nova.compute.manager [instance: 12b39079-051e-4997-9fa1-7e467af04306] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2295.602314] env[68443]: ERROR nova.compute.manager [instance: 12b39079-051e-4997-9fa1-7e467af04306] vm_util.copy_virtual_disk( [ 2295.602314] env[68443]: ERROR nova.compute.manager [instance: 12b39079-051e-4997-9fa1-7e467af04306] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2295.602314] env[68443]: ERROR nova.compute.manager [instance: 12b39079-051e-4997-9fa1-7e467af04306] session._wait_for_task(vmdk_copy_task) [ 2295.602314] env[68443]: ERROR nova.compute.manager [instance: 12b39079-051e-4997-9fa1-7e467af04306] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2295.602314] env[68443]: ERROR nova.compute.manager [instance: 12b39079-051e-4997-9fa1-7e467af04306] return self.wait_for_task(task_ref) [ 2295.602314] env[68443]: ERROR nova.compute.manager [instance: 12b39079-051e-4997-9fa1-7e467af04306] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2295.602314] env[68443]: ERROR nova.compute.manager [instance: 12b39079-051e-4997-9fa1-7e467af04306] return evt.wait() [ 2295.602314] env[68443]: ERROR nova.compute.manager [instance: 12b39079-051e-4997-9fa1-7e467af04306] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2295.602314] env[68443]: ERROR nova.compute.manager [instance: 12b39079-051e-4997-9fa1-7e467af04306] result = hub.switch() [ 2295.602314] env[68443]: ERROR nova.compute.manager [instance: 12b39079-051e-4997-9fa1-7e467af04306] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2295.602314] env[68443]: ERROR nova.compute.manager [instance: 12b39079-051e-4997-9fa1-7e467af04306] return self.greenlet.switch() [ 2295.602314] env[68443]: ERROR nova.compute.manager [instance: 12b39079-051e-4997-9fa1-7e467af04306] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2295.602314] env[68443]: ERROR nova.compute.manager [instance: 12b39079-051e-4997-9fa1-7e467af04306] self.f(*self.args, **self.kw) [ 2295.602751] env[68443]: ERROR nova.compute.manager [instance: 12b39079-051e-4997-9fa1-7e467af04306] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2295.602751] env[68443]: ERROR nova.compute.manager [instance: 12b39079-051e-4997-9fa1-7e467af04306] raise exceptions.translate_fault(task_info.error) [ 2295.602751] env[68443]: ERROR nova.compute.manager [instance: 12b39079-051e-4997-9fa1-7e467af04306] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2295.602751] env[68443]: ERROR nova.compute.manager [instance: 12b39079-051e-4997-9fa1-7e467af04306] Faults: ['InvalidArgument'] [ 2295.602751] env[68443]: ERROR nova.compute.manager [instance: 12b39079-051e-4997-9fa1-7e467af04306] [ 2295.602751] env[68443]: DEBUG nova.compute.utils [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] [instance: 12b39079-051e-4997-9fa1-7e467af04306] VimFaultException {{(pid=68443) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2295.603987] env[68443]: DEBUG nova.compute.manager [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Build of instance 12b39079-051e-4997-9fa1-7e467af04306 was re-scheduled: A specified parameter was not correct: fileType [ 2295.603987] env[68443]: Faults: ['InvalidArgument'] {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2295.604370] env[68443]: DEBUG nova.compute.manager [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Unplugging VIFs for instance {{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2295.604547] env[68443]: DEBUG nova.compute.manager [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2295.604716] env[68443]: DEBUG nova.compute.manager [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2295.604881] env[68443]: DEBUG nova.network.neutron [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] [instance: 12b39079-051e-4997-9fa1-7e467af04306] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2295.824811] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2295.825150] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2295.834857] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2295.835083] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2295.835257] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2295.835413] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68443) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2295.836593] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3877cf15-12a9-43e0-800e-a8774952adfe {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2295.846342] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91e5447b-2f78-4093-b22d-6f3197b22387 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2295.860829] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85b0e904-2e9a-48f2-997d-410b1f695d26 {{(pid=68443) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2295.867348] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5a81be7-705c-4786-9ea9-f5f189f958df {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2295.896611] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180915MB free_disk=105GB free_vcpus=48 pci_devices=None {{(pid=68443) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2295.896796] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2295.896973] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2295.981275] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 12b39079-051e-4997-9fa1-7e467af04306 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2295.981483] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 6333b256-471f-485d-b099-21fa82349319 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2295.981891] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance a077b8cb-2ae3-4a4b-b309-ca8dbca0e412 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2295.981891] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 75ba0bb9-0498-4434-aed1-b03aa0bcaf03 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2295.981891] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance da00322f-5482-4511-94a4-2e2f3705fb99 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2295.982089] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 83198ee3-dbb4-4088-b889-1aa9196f0b92 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2295.982153] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 434a8f2e-7c08-4b16-b255-45b168679f49 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2295.982252] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance d19509cf-7828-4e55-bf2b-4c57b9eab217 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2295.982369] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance f3c8ee5f-a97a-46a4-82e4-ec1c55ff1114 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2295.982562] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2295.982697] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1536MB phys_disk=200GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2296.003118] env[68443]: DEBUG nova.network.neutron [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2296.019874] env[68443]: INFO nova.compute.manager [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Took 0.41 seconds to deallocate network for instance. 
[ 2296.114079] env[68443]: INFO nova.scheduler.client.report [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Deleted allocations for instance 12b39079-051e-4997-9fa1-7e467af04306 [ 2296.121058] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90a04d19-2da5-4dc5-a370-b50bc897b54a {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2296.130032] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec2f1176-7f57-4009-8eec-e1abb156da6d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2296.162537] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82b8efa5-95fa-48e8-a604-177f895b4022 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2296.165312] env[68443]: DEBUG oslo_concurrency.lockutils [None req-a8235ca0-b136-4f22-8938-dae0fc0e597b tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Lock "12b39079-051e-4997-9fa1-7e467af04306" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 635.141s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2296.165792] env[68443]: DEBUG oslo_concurrency.lockutils [None req-70cd0dd9-2c4b-4ae6-9d99-98b7b5fa4c07 tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Lock "12b39079-051e-4997-9fa1-7e467af04306" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 439.525s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2296.166024] env[68443]: DEBUG oslo_concurrency.lockutils [None req-70cd0dd9-2c4b-4ae6-9d99-98b7b5fa4c07 tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Acquiring lock "12b39079-051e-4997-9fa1-7e467af04306-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2296.166235] env[68443]: DEBUG oslo_concurrency.lockutils [None req-70cd0dd9-2c4b-4ae6-9d99-98b7b5fa4c07 tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Lock "12b39079-051e-4997-9fa1-7e467af04306-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2296.166400] env[68443]: DEBUG oslo_concurrency.lockutils [None req-70cd0dd9-2c4b-4ae6-9d99-98b7b5fa4c07 tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Lock "12b39079-051e-4997-9fa1-7e467af04306-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2296.168641] env[68443]: INFO nova.compute.manager [None req-70cd0dd9-2c4b-4ae6-9d99-98b7b5fa4c07 
tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Terminating instance [ 2296.172715] env[68443]: DEBUG nova.compute.manager [None req-70cd0dd9-2c4b-4ae6-9d99-98b7b5fa4c07 tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2296.172928] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-70cd0dd9-2c4b-4ae6-9d99-98b7b5fa4c07 tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2296.174191] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20a9bbb9-0535-42db-882d-c5772df2dc4b {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2296.178108] env[68443]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cf1f6403-a1d5-462c-9adf-d29a3ef63d2c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2296.190067] env[68443]: DEBUG nova.compute.provider_tree [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2296.194653] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb6e28e8-d7e6-4790-9675-b2452206c811 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2296.205602] env[68443]: DEBUG nova.scheduler.client.report [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2296.227573] env[68443]: WARNING nova.virt.vmwareapi.vmops [None req-70cd0dd9-2c4b-4ae6-9d99-98b7b5fa4c07 tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 12b39079-051e-4997-9fa1-7e467af04306 could not be found. 
[ 2296.227797] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-70cd0dd9-2c4b-4ae6-9d99-98b7b5fa4c07 tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2296.227954] env[68443]: INFO nova.compute.manager [None req-70cd0dd9-2c4b-4ae6-9d99-98b7b5fa4c07 tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Took 0.06 seconds to destroy the instance on the hypervisor. [ 2296.228223] env[68443]: DEBUG oslo.service.loopingcall [None req-70cd0dd9-2c4b-4ae6-9d99-98b7b5fa4c07 tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2296.228840] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68443) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2296.229033] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.332s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2296.229237] env[68443]: DEBUG nova.compute.manager [-] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2296.229336] env[68443]: DEBUG nova.network.neutron [-] [instance: 12b39079-051e-4997-9fa1-7e467af04306] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2296.254368] env[68443]: DEBUG nova.network.neutron [-] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2296.262211] env[68443]: INFO nova.compute.manager [-] [instance: 12b39079-051e-4997-9fa1-7e467af04306] Took 0.03 seconds to deallocate network for instance. 
[ 2296.352305] env[68443]: DEBUG oslo_concurrency.lockutils [None req-70cd0dd9-2c4b-4ae6-9d99-98b7b5fa4c07 tempest-InstanceActionsTestJSON-16609546 tempest-InstanceActionsTestJSON-16609546-project-member] Lock "12b39079-051e-4997-9fa1-7e467af04306" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.186s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2296.353170] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "12b39079-051e-4997-9fa1-7e467af04306" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 402.270s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2296.353361] env[68443]: INFO nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 12b39079-051e-4997-9fa1-7e467af04306] During sync_power_state the instance has a pending task (deleting). Skip. [ 2296.353535] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "12b39079-051e-4997-9fa1-7e467af04306" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2299.232296] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2299.825874] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2299.825874] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68443) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 2300.825174] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2300.825526] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Starting heal instance info cache {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 2300.825526] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Rebuilding the list of instances to heal {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 2300.846034] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 6333b256-471f-485d-b099-21fa82349319] Skipping network cache update for instance because it is Building. 
{{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2300.846213] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2300.846327] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2300.846455] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2300.846578] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 83198ee3-dbb4-4088-b889-1aa9196f0b92] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2300.846699] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 434a8f2e-7c08-4b16-b255-45b168679f49] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2300.846822] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: d19509cf-7828-4e55-bf2b-4c57b9eab217] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2300.847062] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: f3c8ee5f-a97a-46a4-82e4-ec1c55ff1114] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2300.847201] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Didn't find any instances for network info cache update. 
{{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 2304.825051] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2305.819933] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2307.824830] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2308.825371] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2322.752826] env[68443]: DEBUG oslo_concurrency.lockutils [None req-bcc27fb3-8b21-421f-9ec1-4c29aa3e84e8 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Acquiring lock "83198ee3-dbb4-4088-b889-1aa9196f0b92" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2344.208454] env[68443]: WARNING oslo_vmware.rw_handles [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2344.208454] env[68443]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2344.208454] env[68443]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2344.208454] env[68443]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2344.208454] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2344.208454] env[68443]: ERROR oslo_vmware.rw_handles response.begin() [ 2344.208454] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2344.208454] env[68443]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2344.208454] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2344.208454] env[68443]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2344.208454] env[68443]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2344.208454] env[68443]: ERROR oslo_vmware.rw_handles [ 2344.209082] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6333b256-471f-485d-b099-21fa82349319] Downloaded image file data 
a80a8b97-4d56-4702-9b02-9d115fcd6710 to vmware_temp/579f02d6-0879-4eb4-8f4e-edfd2bc92dc6/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2344.210671] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6333b256-471f-485d-b099-21fa82349319] Caching image {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2344.210910] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Copying Virtual Disk [datastore1] vmware_temp/579f02d6-0879-4eb4-8f4e-edfd2bc92dc6/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk to [datastore1] vmware_temp/579f02d6-0879-4eb4-8f4e-edfd2bc92dc6/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk {{(pid=68443) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2344.211213] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f7ade070-31af-4e05-bb05-89300d2e354e {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2344.219177] env[68443]: DEBUG oslo_vmware.api [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Waiting for the task: (returnval){ [ 2344.219177] env[68443]: value = "task-3374097" [ 2344.219177] env[68443]: _type = "Task" [ 2344.219177] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2344.226808] env[68443]: DEBUG oslo_vmware.api [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Task: {'id': task-3374097, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2344.729534] env[68443]: DEBUG oslo_vmware.exceptions [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Fault InvalidArgument not matched. 
{{(pid=68443) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2344.729797] env[68443]: DEBUG oslo_concurrency.lockutils [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2344.730371] env[68443]: ERROR nova.compute.manager [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6333b256-471f-485d-b099-21fa82349319] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2344.730371] env[68443]: Faults: ['InvalidArgument'] [ 2344.730371] env[68443]: ERROR nova.compute.manager [instance: 6333b256-471f-485d-b099-21fa82349319] Traceback (most recent call last): [ 2344.730371] env[68443]: ERROR nova.compute.manager [instance: 6333b256-471f-485d-b099-21fa82349319] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2344.730371] env[68443]: ERROR nova.compute.manager [instance: 6333b256-471f-485d-b099-21fa82349319] yield resources [ 2344.730371] env[68443]: ERROR nova.compute.manager [instance: 6333b256-471f-485d-b099-21fa82349319] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2344.730371] env[68443]: ERROR nova.compute.manager [instance: 6333b256-471f-485d-b099-21fa82349319] self.driver.spawn(context, instance, image_meta, [ 2344.730371] env[68443]: ERROR nova.compute.manager [instance: 6333b256-471f-485d-b099-21fa82349319] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2344.730371] env[68443]: ERROR nova.compute.manager [instance: 6333b256-471f-485d-b099-21fa82349319] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2344.730371] env[68443]: ERROR nova.compute.manager [instance: 6333b256-471f-485d-b099-21fa82349319] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2344.730371] env[68443]: ERROR nova.compute.manager [instance: 6333b256-471f-485d-b099-21fa82349319] self._fetch_image_if_missing(context, vi) [ 2344.730371] env[68443]: ERROR nova.compute.manager [instance: 6333b256-471f-485d-b099-21fa82349319] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2344.730717] env[68443]: ERROR nova.compute.manager [instance: 6333b256-471f-485d-b099-21fa82349319] image_cache(vi, tmp_image_ds_loc) [ 2344.730717] env[68443]: ERROR nova.compute.manager [instance: 6333b256-471f-485d-b099-21fa82349319] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2344.730717] env[68443]: ERROR nova.compute.manager [instance: 6333b256-471f-485d-b099-21fa82349319] vm_util.copy_virtual_disk( [ 2344.730717] env[68443]: ERROR nova.compute.manager [instance: 6333b256-471f-485d-b099-21fa82349319] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2344.730717] env[68443]: ERROR nova.compute.manager [instance: 6333b256-471f-485d-b099-21fa82349319] session._wait_for_task(vmdk_copy_task) [ 2344.730717] env[68443]: ERROR nova.compute.manager [instance: 6333b256-471f-485d-b099-21fa82349319] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2344.730717] env[68443]: ERROR nova.compute.manager [instance: 6333b256-471f-485d-b099-21fa82349319] return self.wait_for_task(task_ref) [ 2344.730717] env[68443]: ERROR nova.compute.manager [instance: 6333b256-471f-485d-b099-21fa82349319] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2344.730717] env[68443]: ERROR nova.compute.manager [instance: 6333b256-471f-485d-b099-21fa82349319] return evt.wait() [ 2344.730717] env[68443]: ERROR nova.compute.manager [instance: 6333b256-471f-485d-b099-21fa82349319] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2344.730717] env[68443]: ERROR nova.compute.manager [instance: 6333b256-471f-485d-b099-21fa82349319] result = hub.switch() [ 2344.730717] env[68443]: ERROR nova.compute.manager [instance: 6333b256-471f-485d-b099-21fa82349319] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2344.730717] env[68443]: ERROR nova.compute.manager [instance: 6333b256-471f-485d-b099-21fa82349319] return self.greenlet.switch() [ 2344.731046] env[68443]: ERROR nova.compute.manager [instance: 6333b256-471f-485d-b099-21fa82349319] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2344.731046] env[68443]: ERROR nova.compute.manager [instance: 6333b256-471f-485d-b099-21fa82349319] self.f(*self.args, **self.kw) [ 2344.731046] env[68443]: ERROR nova.compute.manager [instance: 6333b256-471f-485d-b099-21fa82349319] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2344.731046] env[68443]: ERROR nova.compute.manager [instance: 6333b256-471f-485d-b099-21fa82349319] raise exceptions.translate_fault(task_info.error) [ 2344.731046] env[68443]: ERROR nova.compute.manager [instance: 6333b256-471f-485d-b099-21fa82349319] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2344.731046] env[68443]: ERROR nova.compute.manager [instance: 6333b256-471f-485d-b099-21fa82349319] Faults: ['InvalidArgument'] [ 2344.731046] env[68443]: ERROR nova.compute.manager [instance: 6333b256-471f-485d-b099-21fa82349319] [ 2344.731046] env[68443]: INFO nova.compute.manager [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6333b256-471f-485d-b099-21fa82349319] Terminating instance [ 2344.732239] env[68443]: DEBUG oslo_concurrency.lockutils [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2344.732471] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2344.732725] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-1320c495-492b-4e71-9224-e6711be2106e {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2344.734922] env[68443]: DEBUG nova.compute.manager [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6333b256-471f-485d-b099-21fa82349319] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2344.735132] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6333b256-471f-485d-b099-21fa82349319] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2344.735823] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59e710dc-2b1b-4a93-b849-7dc1a8f0cf35 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2344.742103] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6333b256-471f-485d-b099-21fa82349319] Unregistering the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2344.742305] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bb852b18-cffb-4d00-8127-ffae36af9deb {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2344.744375] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2344.744584] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68443) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2344.745521] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a7a2c8f-3646-47f4-b260-6d51a0828f37 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2344.750358] env[68443]: DEBUG oslo_vmware.api [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Waiting for the task: (returnval){ [ 2344.750358] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]528b26f1-0cd4-0489-2a26-95ac8292f8a7" [ 2344.750358] env[68443]: _type = "Task" [ 2344.750358] env[68443]: } to complete. 
{{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2344.764232] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Preparing fetch location {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2344.764459] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Creating directory with path [datastore1] vmware_temp/87f6eb1e-04c7-4d98-bab2-99058f734089/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2344.764670] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-71271f38-a3c9-483e-8288-d20234b448a4 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2344.784782] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Created directory with path [datastore1] vmware_temp/87f6eb1e-04c7-4d98-bab2-99058f734089/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2344.784979] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Fetch image to [datastore1] vmware_temp/87f6eb1e-04c7-4d98-bab2-99058f734089/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2344.785167] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to [datastore1] vmware_temp/87f6eb1e-04c7-4d98-bab2-99058f734089/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2344.785902] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0811048f-7fe9-4e2a-8a8b-a6b96f6f6a21 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2344.793914] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c03c03a7-3a95-4a55-b8d3-cc587ba57b1a {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2344.806435] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9c51c71-e284-4716-994e-796dbc7bb31d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2344.840388] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-4445de72-9eb6-4940-9060-066aacf7152b {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2344.842773] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6333b256-471f-485d-b099-21fa82349319] Unregistered the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2344.842968] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6333b256-471f-485d-b099-21fa82349319] Deleting contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2344.843156] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Deleting the datastore file [datastore1] 6333b256-471f-485d-b099-21fa82349319 {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2344.843373] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8acc9df9-912e-4d09-9e6d-233857995571 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2344.847902] env[68443]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-c80f572e-9c93-4370-b80c-9561bbf2efce {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2344.850474] env[68443]: DEBUG oslo_vmware.api [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Waiting for the task: (returnval){ [ 2344.850474] env[68443]: value = "task-3374099" [ 2344.850474] env[68443]: _type = "Task" [ 2344.850474] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2344.858703] env[68443]: DEBUG oslo_vmware.api [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Task: {'id': task-3374099, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2344.868557] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2344.984708] env[68443]: DEBUG oslo_vmware.rw_handles [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/87f6eb1e-04c7-4d98-bab2-99058f734089/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68443) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2345.045784] env[68443]: DEBUG oslo_vmware.rw_handles [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Completed reading data from the image iterator. {{(pid=68443) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2345.045994] env[68443]: DEBUG oslo_vmware.rw_handles [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/87f6eb1e-04c7-4d98-bab2-99058f734089/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68443) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2345.360737] env[68443]: DEBUG oslo_vmware.api [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Task: {'id': task-3374099, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.070894} completed successfully. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2345.361019] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Deleted the datastore file {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2345.361115] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6333b256-471f-485d-b099-21fa82349319] Deleted contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2345.361291] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6333b256-471f-485d-b099-21fa82349319] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2345.361465] env[68443]: INFO nova.compute.manager [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6333b256-471f-485d-b099-21fa82349319] Took 0.63 seconds to destroy the instance on the hypervisor. [ 2345.363680] env[68443]: DEBUG nova.compute.claims [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6333b256-471f-485d-b099-21fa82349319] Aborting claim: {{(pid=68443) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2345.363859] env[68443]: DEBUG oslo_concurrency.lockutils [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2345.364085] env[68443]: DEBUG oslo_concurrency.lockutils [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2345.510255] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66669486-3ebd-4ad9-8c04-192f459c8660 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2345.517494] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a48839a4-fa04-4264-983c-c2ec236cf7ec {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2345.548160] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20e1a940-c01d-4f85-8bb2-7c4ee2f64947 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2345.555370] env[68443]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36a3eb8b-2401-4187-9ac4-ebc5c680ebfe {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2345.568246] env[68443]: DEBUG nova.compute.provider_tree [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2345.576291] env[68443]: DEBUG nova.scheduler.client.report [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2345.591715] env[68443]: DEBUG oslo_concurrency.lockutils [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.227s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2345.592219] env[68443]: ERROR nova.compute.manager [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6333b256-471f-485d-b099-21fa82349319] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2345.592219] env[68443]: Faults: ['InvalidArgument'] [ 2345.592219] env[68443]: ERROR nova.compute.manager [instance: 6333b256-471f-485d-b099-21fa82349319] Traceback (most recent call last): [ 2345.592219] env[68443]: ERROR nova.compute.manager [instance: 6333b256-471f-485d-b099-21fa82349319] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2345.592219] env[68443]: ERROR nova.compute.manager [instance: 6333b256-471f-485d-b099-21fa82349319] self.driver.spawn(context, instance, image_meta, [ 2345.592219] env[68443]: ERROR nova.compute.manager [instance: 6333b256-471f-485d-b099-21fa82349319] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2345.592219] env[68443]: ERROR nova.compute.manager [instance: 6333b256-471f-485d-b099-21fa82349319] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2345.592219] env[68443]: ERROR nova.compute.manager [instance: 6333b256-471f-485d-b099-21fa82349319] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2345.592219] env[68443]: ERROR nova.compute.manager [instance: 6333b256-471f-485d-b099-21fa82349319] self._fetch_image_if_missing(context, vi) [ 2345.592219] env[68443]: ERROR nova.compute.manager [instance: 6333b256-471f-485d-b099-21fa82349319] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in 
_fetch_image_if_missing [ 2345.592219] env[68443]: ERROR nova.compute.manager [instance: 6333b256-471f-485d-b099-21fa82349319] image_cache(vi, tmp_image_ds_loc) [ 2345.592219] env[68443]: ERROR nova.compute.manager [instance: 6333b256-471f-485d-b099-21fa82349319] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2345.592565] env[68443]: ERROR nova.compute.manager [instance: 6333b256-471f-485d-b099-21fa82349319] vm_util.copy_virtual_disk( [ 2345.592565] env[68443]: ERROR nova.compute.manager [instance: 6333b256-471f-485d-b099-21fa82349319] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2345.592565] env[68443]: ERROR nova.compute.manager [instance: 6333b256-471f-485d-b099-21fa82349319] session._wait_for_task(vmdk_copy_task) [ 2345.592565] env[68443]: ERROR nova.compute.manager [instance: 6333b256-471f-485d-b099-21fa82349319] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2345.592565] env[68443]: ERROR nova.compute.manager [instance: 6333b256-471f-485d-b099-21fa82349319] return self.wait_for_task(task_ref) [ 2345.592565] env[68443]: ERROR nova.compute.manager [instance: 6333b256-471f-485d-b099-21fa82349319] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2345.592565] env[68443]: ERROR nova.compute.manager [instance: 6333b256-471f-485d-b099-21fa82349319] return evt.wait() [ 2345.592565] env[68443]: ERROR nova.compute.manager [instance: 6333b256-471f-485d-b099-21fa82349319] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2345.592565] env[68443]: ERROR nova.compute.manager [instance: 6333b256-471f-485d-b099-21fa82349319] result = hub.switch() [ 2345.592565] env[68443]: ERROR nova.compute.manager [instance: 6333b256-471f-485d-b099-21fa82349319] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2345.592565] env[68443]: ERROR nova.compute.manager [instance: 6333b256-471f-485d-b099-21fa82349319] return self.greenlet.switch() [ 2345.592565] env[68443]: ERROR nova.compute.manager [instance: 6333b256-471f-485d-b099-21fa82349319] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2345.592565] env[68443]: ERROR nova.compute.manager [instance: 6333b256-471f-485d-b099-21fa82349319] self.f(*self.args, **self.kw) [ 2345.592919] env[68443]: ERROR nova.compute.manager [instance: 6333b256-471f-485d-b099-21fa82349319] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2345.592919] env[68443]: ERROR nova.compute.manager [instance: 6333b256-471f-485d-b099-21fa82349319] raise exceptions.translate_fault(task_info.error) [ 2345.592919] env[68443]: ERROR nova.compute.manager [instance: 6333b256-471f-485d-b099-21fa82349319] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2345.592919] env[68443]: ERROR nova.compute.manager [instance: 6333b256-471f-485d-b099-21fa82349319] Faults: ['InvalidArgument'] [ 2345.592919] env[68443]: ERROR nova.compute.manager [instance: 6333b256-471f-485d-b099-21fa82349319] [ 2345.592919] env[68443]: DEBUG nova.compute.utils [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6333b256-471f-485d-b099-21fa82349319] VimFaultException {{(pid=68443) 
notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2345.594731] env[68443]: DEBUG nova.compute.manager [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6333b256-471f-485d-b099-21fa82349319] Build of instance 6333b256-471f-485d-b099-21fa82349319 was re-scheduled: A specified parameter was not correct: fileType [ 2345.594731] env[68443]: Faults: ['InvalidArgument'] {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2345.595121] env[68443]: DEBUG nova.compute.manager [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6333b256-471f-485d-b099-21fa82349319] Unplugging VIFs for instance {{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2345.595323] env[68443]: DEBUG nova.compute.manager [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2345.595462] env[68443]: DEBUG nova.compute.manager [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6333b256-471f-485d-b099-21fa82349319] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2345.595627] env[68443]: DEBUG nova.network.neutron [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6333b256-471f-485d-b099-21fa82349319] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2345.877147] env[68443]: DEBUG nova.network.neutron [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6333b256-471f-485d-b099-21fa82349319] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2345.889527] env[68443]: INFO nova.compute.manager [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6333b256-471f-485d-b099-21fa82349319] Took 0.29 seconds to deallocate network for instance. 
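The "Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 ..." report a few entries above carries the provider's full inventory. As a quick cross-check of those numbers, the schedulable capacity per resource class follows (total - reserved) * allocation_ratio; the snippet below is just that arithmetic with the values copied from the log, not Nova or Placement code:

# Values copied from the inventory report in the log above.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

def capacity(rc):
    """Placement-style capacity: (total - reserved) * allocation_ratio."""
    inv = inventory[rc]
    return (inv['total'] - inv['reserved']) * inv['allocation_ratio']

for rc in inventory:
    print(rc, capacity(rc))
# -> VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0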
[ 2345.990915] env[68443]: INFO nova.scheduler.client.report [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Deleted allocations for instance 6333b256-471f-485d-b099-21fa82349319 [ 2346.021822] env[68443]: DEBUG oslo_concurrency.lockutils [None req-71686dad-e279-4b9a-9394-efbfa9bd30ae tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Lock "6333b256-471f-485d-b099-21fa82349319" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 612.070s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2346.023086] env[68443]: DEBUG oslo_concurrency.lockutils [None req-1a1a9584-8315-4259-8a60-d816b2f7fb9d tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Lock "6333b256-471f-485d-b099-21fa82349319" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 416.675s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2346.023086] env[68443]: DEBUG oslo_concurrency.lockutils [None req-1a1a9584-8315-4259-8a60-d816b2f7fb9d tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Acquiring lock "6333b256-471f-485d-b099-21fa82349319-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2346.023086] env[68443]: DEBUG oslo_concurrency.lockutils [None req-1a1a9584-8315-4259-8a60-d816b2f7fb9d tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Lock "6333b256-471f-485d-b099-21fa82349319-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2346.023481] env[68443]: DEBUG oslo_concurrency.lockutils [None req-1a1a9584-8315-4259-8a60-d816b2f7fb9d tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Lock "6333b256-471f-485d-b099-21fa82349319-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2346.025545] env[68443]: INFO nova.compute.manager [None req-1a1a9584-8315-4259-8a60-d816b2f7fb9d tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6333b256-471f-485d-b099-21fa82349319] Terminating instance [ 2346.028228] env[68443]: DEBUG nova.compute.manager [None req-1a1a9584-8315-4259-8a60-d816b2f7fb9d tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6333b256-471f-485d-b099-21fa82349319] Start destroying the instance on the hypervisor. 
{{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2346.028228] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-1a1a9584-8315-4259-8a60-d816b2f7fb9d tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6333b256-471f-485d-b099-21fa82349319] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2346.028693] env[68443]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7041b430-091a-446d-98cb-f4d24b1dd50b {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2346.040030] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47e4bb7f-e61b-44c4-8916-12b0aa20d07b {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2346.068489] env[68443]: WARNING nova.virt.vmwareapi.vmops [None req-1a1a9584-8315-4259-8a60-d816b2f7fb9d tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6333b256-471f-485d-b099-21fa82349319] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6333b256-471f-485d-b099-21fa82349319 could not be found. [ 2346.068724] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-1a1a9584-8315-4259-8a60-d816b2f7fb9d tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6333b256-471f-485d-b099-21fa82349319] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2346.068873] env[68443]: INFO nova.compute.manager [None req-1a1a9584-8315-4259-8a60-d816b2f7fb9d tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 6333b256-471f-485d-b099-21fa82349319] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2346.069142] env[68443]: DEBUG oslo.service.loopingcall [None req-1a1a9584-8315-4259-8a60-d816b2f7fb9d tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2346.069385] env[68443]: DEBUG nova.compute.manager [-] [instance: 6333b256-471f-485d-b099-21fa82349319] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2346.069509] env[68443]: DEBUG nova.network.neutron [-] [instance: 6333b256-471f-485d-b099-21fa82349319] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2346.093985] env[68443]: DEBUG nova.network.neutron [-] [instance: 6333b256-471f-485d-b099-21fa82349319] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2346.102454] env[68443]: INFO nova.compute.manager [-] [instance: 6333b256-471f-485d-b099-21fa82349319] Took 0.03 seconds to deallocate network for instance. 
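The "Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return." entry above comes from oslo.service's looping-call helper. A minimal sketch of that generic wait pattern follows (hypothetical callback, and not necessarily the exact looping-call variant Nova uses here):

from oslo_service import loopingcall

attempts = {'count': 0}

def _retrying_work():
    # Hypothetical retried operation: raising LoopingCallDone makes the
    # caller's .wait() return, which is the "return" the DEBUG line waits for.
    attempts['count'] += 1
    if attempts['count'] >= 3:
        raise loopingcall.LoopingCallDone(retvalue=True)

timer = loopingcall.FixedIntervalLoopingCall(_retrying_work)
result = timer.start(interval=0.5).wait()  # True after three attempts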
[ 2346.194638] env[68443]: DEBUG oslo_concurrency.lockutils [None req-1a1a9584-8315-4259-8a60-d816b2f7fb9d tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Lock "6333b256-471f-485d-b099-21fa82349319" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.172s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2355.824917] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2356.825870] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2356.837478] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2356.837718] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2356.837924] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2356.838101] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68443) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2356.839396] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbf1a90b-623c-4e12-951c-0d4bc06eed32 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2356.848832] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b54fed6-5f3a-431c-bbb1-fa4602c4e559 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2356.862848] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b548884-f894-4649-b03e-be74f232b187 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2356.869218] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21f1ae27-b282-41d0-97a4-96ae9471d7cc {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2356.898280] env[68443]: DEBUG 
nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180948MB free_disk=105GB free_vcpus=48 pci_devices=None {{(pid=68443) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2356.898429] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2356.898620] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2356.961234] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance a077b8cb-2ae3-4a4b-b309-ca8dbca0e412 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2356.961429] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 75ba0bb9-0498-4434-aed1-b03aa0bcaf03 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2356.961564] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance da00322f-5482-4511-94a4-2e2f3705fb99 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2356.961686] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 83198ee3-dbb4-4088-b889-1aa9196f0b92 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2356.961805] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 434a8f2e-7c08-4b16-b255-45b168679f49 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2356.961922] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance d19509cf-7828-4e55-bf2b-4c57b9eab217 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2356.962049] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance f3c8ee5f-a97a-46a4-82e4-ec1c55ff1114 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2356.962228] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2356.962362] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1408MB phys_disk=200GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2357.048311] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-119d8464-f6b6-4dc2-a152-18b37340c574 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2357.056196] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9006cf4-8f94-47c2-9f84-89e39466faa4 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2357.085441] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d0e993e-c1c5-4e07-8213-181481ce6c63 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2357.092294] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c8c284a-dee5-4873-a9bf-b2d085c8dc42 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2357.104833] env[68443]: DEBUG nova.compute.provider_tree [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2357.113070] env[68443]: DEBUG nova.scheduler.client.report [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2357.127651] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68443) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2357.127651] 
env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.229s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2361.127196] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2361.825288] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2361.825530] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68443) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 2362.826401] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2362.826763] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Starting heal instance info cache {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 2362.826763] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Rebuilding the list of instances to heal {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 2362.843930] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2362.844114] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2362.844253] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2362.844383] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 83198ee3-dbb4-4088-b889-1aa9196f0b92] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2362.844508] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 434a8f2e-7c08-4b16-b255-45b168679f49] Skipping network cache update for instance because it is Building. 
{{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2362.844634] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: d19509cf-7828-4e55-bf2b-4c57b9eab217] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2362.844758] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: f3c8ee5f-a97a-46a4-82e4-ec1c55ff1114] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2362.844879] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Didn't find any instances for network info cache update. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 2365.839201] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2366.825466] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2367.825519] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2370.826983] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2374.823168] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2394.228122] env[68443]: WARNING oslo_vmware.rw_handles [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2394.228122] env[68443]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2394.228122] env[68443]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2394.228122] env[68443]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2394.228122] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2394.228122] env[68443]: ERROR oslo_vmware.rw_handles response.begin() [ 2394.228122] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2394.228122] env[68443]: ERROR oslo_vmware.rw_handles version, status, reason = 
self._read_status() [ 2394.228122] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2394.228122] env[68443]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2394.228122] env[68443]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2394.228122] env[68443]: ERROR oslo_vmware.rw_handles [ 2394.228122] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Downloaded image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to vmware_temp/87f6eb1e-04c7-4d98-bab2-99058f734089/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2394.229803] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Caching image {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2394.230066] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Copying Virtual Disk [datastore1] vmware_temp/87f6eb1e-04c7-4d98-bab2-99058f734089/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk to [datastore1] vmware_temp/87f6eb1e-04c7-4d98-bab2-99058f734089/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk {{(pid=68443) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2394.230405] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d91332a4-ecf1-4229-8136-2923db4ba8f3 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2394.238635] env[68443]: DEBUG oslo_vmware.api [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Waiting for the task: (returnval){ [ 2394.238635] env[68443]: value = "task-3374100" [ 2394.238635] env[68443]: _type = "Task" [ 2394.238635] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2394.246170] env[68443]: DEBUG oslo_vmware.api [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Task: {'id': task-3374100, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2394.748977] env[68443]: DEBUG oslo_vmware.exceptions [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Fault InvalidArgument not matched. 
{{(pid=68443) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2394.749280] env[68443]: DEBUG oslo_concurrency.lockutils [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2394.749839] env[68443]: ERROR nova.compute.manager [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2394.749839] env[68443]: Faults: ['InvalidArgument'] [ 2394.749839] env[68443]: ERROR nova.compute.manager [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Traceback (most recent call last): [ 2394.749839] env[68443]: ERROR nova.compute.manager [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2394.749839] env[68443]: ERROR nova.compute.manager [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] yield resources [ 2394.749839] env[68443]: ERROR nova.compute.manager [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2394.749839] env[68443]: ERROR nova.compute.manager [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] self.driver.spawn(context, instance, image_meta, [ 2394.749839] env[68443]: ERROR nova.compute.manager [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2394.749839] env[68443]: ERROR nova.compute.manager [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2394.749839] env[68443]: ERROR nova.compute.manager [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2394.749839] env[68443]: ERROR nova.compute.manager [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] self._fetch_image_if_missing(context, vi) [ 2394.749839] env[68443]: ERROR nova.compute.manager [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2394.750261] env[68443]: ERROR nova.compute.manager [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] image_cache(vi, tmp_image_ds_loc) [ 2394.750261] env[68443]: ERROR nova.compute.manager [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2394.750261] env[68443]: ERROR nova.compute.manager [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] vm_util.copy_virtual_disk( [ 2394.750261] env[68443]: ERROR nova.compute.manager [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2394.750261] env[68443]: ERROR nova.compute.manager [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] session._wait_for_task(vmdk_copy_task) [ 2394.750261] env[68443]: ERROR nova.compute.manager [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2394.750261] env[68443]: ERROR nova.compute.manager [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] return self.wait_for_task(task_ref) [ 2394.750261] env[68443]: ERROR nova.compute.manager [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2394.750261] env[68443]: ERROR nova.compute.manager [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] return evt.wait() [ 2394.750261] env[68443]: ERROR nova.compute.manager [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2394.750261] env[68443]: ERROR nova.compute.manager [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] result = hub.switch() [ 2394.750261] env[68443]: ERROR nova.compute.manager [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2394.750261] env[68443]: ERROR nova.compute.manager [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] return self.greenlet.switch() [ 2394.750722] env[68443]: ERROR nova.compute.manager [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2394.750722] env[68443]: ERROR nova.compute.manager [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] self.f(*self.args, **self.kw) [ 2394.750722] env[68443]: ERROR nova.compute.manager [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2394.750722] env[68443]: ERROR nova.compute.manager [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] raise exceptions.translate_fault(task_info.error) [ 2394.750722] env[68443]: ERROR nova.compute.manager [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2394.750722] env[68443]: ERROR nova.compute.manager [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Faults: ['InvalidArgument'] [ 2394.750722] env[68443]: ERROR nova.compute.manager [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] [ 2394.750722] env[68443]: INFO nova.compute.manager [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Terminating instance [ 2394.751723] env[68443]: DEBUG oslo_concurrency.lockutils [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2394.751939] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2394.752192] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c2a511eb-47aa-4e33-8c16-7ee023fd216f {{(pid=68443) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2394.755031] env[68443]: DEBUG nova.compute.manager [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2394.755235] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2394.755958] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82ac6aa8-d4ba-4884-8fad-773cecec9b20 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2394.762805] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Unregistering the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2394.763039] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-455ec189-db75-42bc-bf89-ba9a9ff5fcb6 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2394.765190] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2394.765365] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68443) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2394.766348] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-490f19ce-5814-4243-93d4-4ede85b09809 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2394.770857] env[68443]: DEBUG oslo_vmware.api [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Waiting for the task: (returnval){ [ 2394.770857] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]52ce2e9a-0973-5ce5-e354-ba05d288e0a9" [ 2394.770857] env[68443]: _type = "Task" [ 2394.770857] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2394.778657] env[68443]: DEBUG oslo_vmware.api [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]52ce2e9a-0973-5ce5-e354-ba05d288e0a9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2394.841114] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Unregistered the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2394.841297] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Deleting contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2394.841449] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Deleting the datastore file [datastore1] a077b8cb-2ae3-4a4b-b309-ca8dbca0e412 {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2394.841699] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c2a53909-ecb4-415c-90ab-30b5731a62bb {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2394.847670] env[68443]: DEBUG oslo_vmware.api [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Waiting for the task: (returnval){ [ 2394.847670] env[68443]: value = "task-3374102" [ 2394.847670] env[68443]: _type = "Task" [ 2394.847670] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2394.855050] env[68443]: DEBUG oslo_vmware.api [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Task: {'id': task-3374102, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2395.280953] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Preparing fetch location {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2395.281371] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Creating directory with path [datastore1] vmware_temp/9d111e2f-0cf6-4dbf-8550-d898083899c1/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2395.281463] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-929fabbd-25d6-4a5a-8229-0fdd6db89128 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2395.293056] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Created directory with path [datastore1] vmware_temp/9d111e2f-0cf6-4dbf-8550-d898083899c1/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2395.293250] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Fetch image to [datastore1] vmware_temp/9d111e2f-0cf6-4dbf-8550-d898083899c1/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2395.293424] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to [datastore1] vmware_temp/9d111e2f-0cf6-4dbf-8550-d898083899c1/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2395.294150] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81d4df3c-51cb-4fd5-906c-f8b856683f7a {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2395.300629] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c6eaf48-db0c-4b60-85a0-4935f31fa099 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2395.309840] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-253dc092-d0c9-4ec6-86d5-05e18b2f1118 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2395.341314] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3c90619-3ecd-4118-905c-82bab61bc2b1 {{(pid=68443) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2395.346660] env[68443]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-91b29491-73a2-443a-bb33-a6987101c0b4 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2395.356351] env[68443]: DEBUG oslo_vmware.api [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Task: {'id': task-3374102, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.06677} completed successfully. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2395.356578] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Deleted the datastore file {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2395.356756] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Deleted contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2395.356927] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2395.357114] env[68443]: INFO nova.compute.manager [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Took 0.60 seconds to destroy the instance on the hypervisor. 
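The traceback above captures the failure pattern repeated throughout this run: nova's copy_virtual_disk helper submits the CopyVirtualDisk_Task, and the oslo.vmware session's task poller translates the vSphere fault into a VimFaultException whose fault_list carries the fault names (['InvalidArgument'] for the bad fileType here), after which the compute manager terminates the half-built instance as the entries above show. A minimal sketch of that calling pattern, assuming an already-authenticated oslo.vmware session and a task reference; the names wait_for_copy, session and task_ref are illustrative, not taken from the log:

    from oslo_vmware import exceptions as vexc

    def wait_for_copy(session, task_ref):
        # wait_for_task() polls the task (the "progress is 0%" lines above)
        # and, if the task errors out, raises the translated fault instead
        # of returning a result.
        try:
            return session.wait_for_task(task_ref)
        except vexc.VimFaultException as exc:
            # exc.fault_list holds the vSphere fault names, e.g.
            # ['InvalidArgument']; nova logs the exception and tears the
            # instance down, as seen in the entries above.
            print("copy failed: %s (faults: %s)" % (exc, exc.fault_list))
            raise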
[ 2395.359228] env[68443]: DEBUG nova.compute.claims [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Aborting claim: {{(pid=68443) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2395.359403] env[68443]: DEBUG oslo_concurrency.lockutils [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2395.359613] env[68443]: DEBUG oslo_concurrency.lockutils [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2395.368058] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2395.420957] env[68443]: DEBUG oslo_vmware.rw_handles [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9d111e2f-0cf6-4dbf-8550-d898083899c1/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68443) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2395.480951] env[68443]: DEBUG oslo_vmware.rw_handles [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Completed reading data from the image iterator. {{(pid=68443) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2395.481170] env[68443]: DEBUG oslo_vmware.rw_handles [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9d111e2f-0cf6-4dbf-8550-d898083899c1/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68443) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2395.541148] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fc81b48-609e-4a68-8bfd-9270e46fca33 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2395.548358] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00135e9b-ebb9-40c8-a9a5-98498ebb4493 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2395.577874] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-124df11b-3ba5-4067-869f-5e1410f3b743 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2395.584468] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd39985d-5050-47e5-b698-846e1dfc0b27 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2395.597008] env[68443]: DEBUG nova.compute.provider_tree [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2395.605332] env[68443]: DEBUG nova.scheduler.client.report [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2395.618501] env[68443]: DEBUG oslo_concurrency.lockutils [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.259s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2395.619196] env[68443]: ERROR nova.compute.manager [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2395.619196] env[68443]: Faults: ['InvalidArgument'] [ 2395.619196] env[68443]: ERROR nova.compute.manager [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Traceback (most recent call last): [ 2395.619196] env[68443]: ERROR nova.compute.manager [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2395.619196] 
env[68443]: ERROR nova.compute.manager [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] self.driver.spawn(context, instance, image_meta, [ 2395.619196] env[68443]: ERROR nova.compute.manager [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2395.619196] env[68443]: ERROR nova.compute.manager [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2395.619196] env[68443]: ERROR nova.compute.manager [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2395.619196] env[68443]: ERROR nova.compute.manager [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] self._fetch_image_if_missing(context, vi) [ 2395.619196] env[68443]: ERROR nova.compute.manager [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2395.619196] env[68443]: ERROR nova.compute.manager [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] image_cache(vi, tmp_image_ds_loc) [ 2395.619196] env[68443]: ERROR nova.compute.manager [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2395.619541] env[68443]: ERROR nova.compute.manager [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] vm_util.copy_virtual_disk( [ 2395.619541] env[68443]: ERROR nova.compute.manager [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2395.619541] env[68443]: ERROR nova.compute.manager [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] session._wait_for_task(vmdk_copy_task) [ 2395.619541] env[68443]: ERROR nova.compute.manager [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2395.619541] env[68443]: ERROR nova.compute.manager [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] return self.wait_for_task(task_ref) [ 2395.619541] env[68443]: ERROR nova.compute.manager [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2395.619541] env[68443]: ERROR nova.compute.manager [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] return evt.wait() [ 2395.619541] env[68443]: ERROR nova.compute.manager [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2395.619541] env[68443]: ERROR nova.compute.manager [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] result = hub.switch() [ 2395.619541] env[68443]: ERROR nova.compute.manager [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2395.619541] env[68443]: ERROR nova.compute.manager [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] return self.greenlet.switch() [ 2395.619541] env[68443]: ERROR nova.compute.manager [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2395.619541] env[68443]: ERROR nova.compute.manager [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] self.f(*self.args, **self.kw) [ 2395.619884] env[68443]: ERROR nova.compute.manager [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2395.619884] env[68443]: ERROR nova.compute.manager [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] raise exceptions.translate_fault(task_info.error) [ 2395.619884] env[68443]: ERROR nova.compute.manager [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2395.619884] env[68443]: ERROR nova.compute.manager [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Faults: ['InvalidArgument'] [ 2395.619884] env[68443]: ERROR nova.compute.manager [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] [ 2395.619884] env[68443]: DEBUG nova.compute.utils [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] VimFaultException {{(pid=68443) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2395.621506] env[68443]: DEBUG nova.compute.manager [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Build of instance a077b8cb-2ae3-4a4b-b309-ca8dbca0e412 was re-scheduled: A specified parameter was not correct: fileType [ 2395.621506] env[68443]: Faults: ['InvalidArgument'] {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2395.621875] env[68443]: DEBUG nova.compute.manager [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Unplugging VIFs for instance {{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2395.622056] env[68443]: DEBUG nova.compute.manager [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2395.622233] env[68443]: DEBUG nova.compute.manager [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2395.622397] env[68443]: DEBUG nova.network.neutron [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2395.930656] env[68443]: DEBUG nova.network.neutron [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2395.935786] env[68443]: DEBUG oslo_concurrency.lockutils [None req-5264452a-0c53-40c3-9b92-3c7b84d396ce tempest-ServersNegativeTestJSON-608941495 tempest-ServersNegativeTestJSON-608941495-project-member] Acquiring lock "434a8f2e-7c08-4b16-b255-45b168679f49" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2395.941731] env[68443]: INFO nova.compute.manager [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Took 0.32 seconds to deallocate network for instance. 
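The oslo_vmware.rw_handles WARNING that opens this stretch of the log (and recurs near its end) comes from closing the HTTP handle used for the datastore transfer: after the image bytes have been streamed to the host (see the "Creating HTTP connection to write to file with size = 21318656 ..." and "Closing write handle ..." entries above), the remote end may drop the connection without sending a response, so the handle's getresponse() call raises RemoteDisconnected; oslo.vmware logs it and carries on, and the log indeed reports the image as downloaded immediately afterwards. A standard-library sketch of that tolerant close, assuming conn is an http.client.HTTPSConnection; the helper name is made up:

    import http.client

    def finish_transfer(conn):
        # Mirrors the behaviour behind the WARNING above: reading the
        # response after the transfer can fail with RemoteDisconnected if
        # the other side hangs up, which is treated as benign because the
        # file has already been written to the datastore.
        try:
            conn.getresponse()
        except http.client.RemoteDisconnected as exc:
            print("remote end closed the connection early: %s" % exc)
        finally:
            conn.close()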
[ 2396.035398] env[68443]: INFO nova.scheduler.client.report [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Deleted allocations for instance a077b8cb-2ae3-4a4b-b309-ca8dbca0e412 [ 2396.055148] env[68443]: DEBUG oslo_concurrency.lockutils [None req-955f3c73-8358-4e11-97e4-c4e4d76991cf tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Lock "a077b8cb-2ae3-4a4b-b309-ca8dbca0e412" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 593.515s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2396.055412] env[68443]: DEBUG oslo_concurrency.lockutils [None req-48f41597-e519-47d8-9c2d-2961aa35fdc5 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Lock "a077b8cb-2ae3-4a4b-b309-ca8dbca0e412" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 397.407s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2396.055654] env[68443]: DEBUG oslo_concurrency.lockutils [None req-48f41597-e519-47d8-9c2d-2961aa35fdc5 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Acquiring lock "a077b8cb-2ae3-4a4b-b309-ca8dbca0e412-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2396.056094] env[68443]: DEBUG oslo_concurrency.lockutils [None req-48f41597-e519-47d8-9c2d-2961aa35fdc5 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Lock "a077b8cb-2ae3-4a4b-b309-ca8dbca0e412-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2396.056094] env[68443]: DEBUG oslo_concurrency.lockutils [None req-48f41597-e519-47d8-9c2d-2961aa35fdc5 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Lock "a077b8cb-2ae3-4a4b-b309-ca8dbca0e412-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2396.058241] env[68443]: INFO nova.compute.manager [None req-48f41597-e519-47d8-9c2d-2961aa35fdc5 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Terminating instance [ 2396.059891] env[68443]: DEBUG nova.compute.manager [None req-48f41597-e519-47d8-9c2d-2961aa35fdc5 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Start destroying the instance on the hypervisor. 
{{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2396.060020] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-48f41597-e519-47d8-9c2d-2961aa35fdc5 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2396.060510] env[68443]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ea9d9a2b-d429-48db-bc97-d043a2c23ae9 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2396.069935] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-846d9bc6-8598-4e8c-b607-650ee363d435 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2396.098030] env[68443]: WARNING nova.virt.vmwareapi.vmops [None req-48f41597-e519-47d8-9c2d-2961aa35fdc5 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a077b8cb-2ae3-4a4b-b309-ca8dbca0e412 could not be found. [ 2396.098347] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-48f41597-e519-47d8-9c2d-2961aa35fdc5 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2396.098546] env[68443]: INFO nova.compute.manager [None req-48f41597-e519-47d8-9c2d-2961aa35fdc5 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2396.098851] env[68443]: DEBUG oslo.service.loopingcall [None req-48f41597-e519-47d8-9c2d-2961aa35fdc5 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2396.099099] env[68443]: DEBUG nova.compute.manager [-] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2396.099199] env[68443]: DEBUG nova.network.neutron [-] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2396.136930] env[68443]: DEBUG nova.network.neutron [-] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2396.145483] env[68443]: INFO nova.compute.manager [-] [instance: a077b8cb-2ae3-4a4b-b309-ca8dbca0e412] Took 0.05 seconds to deallocate network for instance. 
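The periodic resource audits bracketing this teardown reconcile exactly with the per-instance claims they list: each tempest instance claims 1 VCPU, 128 MB of RAM and 1 GB of disk, and the tracker folds the 512 MB reserved in the MEMORY_MB inventory into used_ram. Seven tracked instances therefore give the used_ram=1408MB / used_vcpus=7 / used_disk=7GB view reported earlier, and once a077b8cb's allocation is deleted the next audit below drops to 512 + 6*128 = 1280 MB, 6 vCPUs and 6 GB. The same arithmetic as a throwaway check (figures copied from the log; the helper is invented for illustration):

    RESERVED_MB = 512                       # MEMORY_MB 'reserved' in the provider inventory
    PER_INSTANCE = {"vcpu": 1, "ram_mb": 128, "disk_gb": 1}

    def expected_usage(n_instances):
        return {
            "used_vcpus": n_instances * PER_INSTANCE["vcpu"],
            "used_ram_mb": RESERVED_MB + n_instances * PER_INSTANCE["ram_mb"],
            "used_disk_gb": n_instances * PER_INSTANCE["disk_gb"],
        }

    assert expected_usage(7) == {"used_vcpus": 7, "used_ram_mb": 1408, "used_disk_gb": 7}
    assert expected_usage(6) == {"used_vcpus": 6, "used_ram_mb": 1280, "used_disk_gb": 6}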
[ 2396.225387] env[68443]: DEBUG oslo_concurrency.lockutils [None req-48f41597-e519-47d8-9c2d-2961aa35fdc5 tempest-AttachInterfacesTestJSON-2068282523 tempest-AttachInterfacesTestJSON-2068282523-project-member] Lock "a077b8cb-2ae3-4a4b-b309-ca8dbca0e412" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.170s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2415.826373] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2418.825696] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2418.838012] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2418.838238] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2418.838412] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2418.838572] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68443) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2418.839721] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30d43c0a-3675-4872-af8f-8412364dc1f4 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2418.848656] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98238111-1d44-4c03-80d9-d49d676d84ab {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2418.863493] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12580510-84a3-431a-b70f-4d11767daf3d {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2418.869494] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4d4e680-43e5-4dbd-8d95-c21fe1b06c79 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2418.897312] env[68443]: DEBUG 
nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180968MB free_disk=105GB free_vcpus=48 pci_devices=None {{(pid=68443) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2418.897455] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2418.897642] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2418.951535] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 75ba0bb9-0498-4434-aed1-b03aa0bcaf03 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2418.951691] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance da00322f-5482-4511-94a4-2e2f3705fb99 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2418.951820] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 83198ee3-dbb4-4088-b889-1aa9196f0b92 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2418.951944] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 434a8f2e-7c08-4b16-b255-45b168679f49 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2418.952080] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance d19509cf-7828-4e55-bf2b-4c57b9eab217 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2418.952203] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance f3c8ee5f-a97a-46a4-82e4-ec1c55ff1114 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2418.952377] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2418.952509] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2419.025442] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89ba328f-3a75-462f-a63f-7f95868f0d4e {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2419.032890] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fef7199e-b805-4b1b-964d-7da40d29e17f {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2419.063012] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce6d39ac-0a3e-4e3b-9138-817acc5acbba {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2419.070042] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6e8cea3-efdb-4ef6-acc6-c88aa492e2b4 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2419.082903] env[68443]: DEBUG nova.compute.provider_tree [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2419.091456] env[68443]: DEBUG nova.scheduler.client.report [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2419.106207] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68443) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2419.106396] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.209s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2421.106881] env[68443]: DEBUG oslo_service.periodic_task [None 
req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2421.825069] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2421.825252] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68443) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 2422.825983] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2422.826348] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Starting heal instance info cache {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 2422.826348] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Rebuilding the list of instances to heal {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 2422.841502] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2422.841655] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2422.841793] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 83198ee3-dbb4-4088-b889-1aa9196f0b92] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2422.841923] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 434a8f2e-7c08-4b16-b255-45b168679f49] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2422.842061] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: d19509cf-7828-4e55-bf2b-4c57b9eab217] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2422.842189] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: f3c8ee5f-a97a-46a4-82e4-ec1c55ff1114] Skipping network cache update for instance because it is Building. 
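The _heal_instance_info_cache pass above rebuilds its candidate list and skips every instance that is still building, which is why it ends, just below, with "Didn't find any instances for network info cache update." A simplified stand-alone sketch of that filtering step (the dicts are hypothetical stand-ins for Nova's Instance objects):

# Simplified sketch of the skip logic seen above; not Nova's actual code.
instances = [
    {'uuid': '75ba0bb9-0498-4434-aed1-b03aa0bcaf03', 'building': True},
    {'uuid': 'da00322f-5482-4511-94a4-2e2f3705fb99', 'building': True},
    # four more building instances omitted for brevity
]

to_heal = []
for inst in instances:
    if inst['building']:
        # "Skipping network cache update for instance because it is Building."
        continue
    to_heal.append(inst)

if not to_heal:
    print("Didn't find any instances for network info cache update.")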
{{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2422.842311] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Didn't find any instances for network info cache update. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 2425.837871] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2427.825619] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2428.825373] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2430.826910] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2441.244671] env[68443]: DEBUG oslo_concurrency.lockutils [None req-e3c2cc14-561b-4fa7-bf9c-a72db211e70e tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Acquiring lock "d19509cf-7828-4e55-bf2b-4c57b9eab217" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2444.243287] env[68443]: WARNING oslo_vmware.rw_handles [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2444.243287] env[68443]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2444.243287] env[68443]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2444.243287] env[68443]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2444.243287] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2444.243287] env[68443]: ERROR oslo_vmware.rw_handles response.begin() [ 2444.243287] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2444.243287] env[68443]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2444.243287] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2444.243287] env[68443]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2444.243287] env[68443]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2444.243287] env[68443]: ERROR oslo_vmware.rw_handles [ 2444.243918] env[68443]: DEBUG nova.virt.vmwareapi.images [None 
req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Downloaded image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to vmware_temp/9d111e2f-0cf6-4dbf-8550-d898083899c1/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2444.245516] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Caching image {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2444.245762] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Copying Virtual Disk [datastore1] vmware_temp/9d111e2f-0cf6-4dbf-8550-d898083899c1/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk to [datastore1] vmware_temp/9d111e2f-0cf6-4dbf-8550-d898083899c1/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk {{(pid=68443) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2444.246057] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f632bd5b-b653-4e95-b250-cc0e60cab60c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2444.253630] env[68443]: DEBUG oslo_vmware.api [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Waiting for the task: (returnval){ [ 2444.253630] env[68443]: value = "task-3374103" [ 2444.253630] env[68443]: _type = "Task" [ 2444.253630] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2444.261685] env[68443]: DEBUG oslo_vmware.api [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Task: {'id': task-3374103, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2444.763377] env[68443]: DEBUG oslo_vmware.exceptions [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Fault InvalidArgument not matched. 
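The CopyVirtualDisk_Task above is driven by the usual oslo.vmware wait-for-task pattern: poll the task info on an interval, log progress, and turn an error state into an exception, which is exactly what surfaces as the VimFaultException in the traceback below. A stand-alone sketch of that loop, with a hypothetical get_task_info() callable in place of the real property-collector query:

import time

class TaskFault(Exception):
    """Stands in for the translated vCenter fault (e.g. InvalidArgument)."""

def wait_for_task(get_task_info, poll_interval=0.5):
    # get_task_info() is a hypothetical callable returning a dict such as
    # {'state': 'running'|'success'|'error', 'progress': 0..100, 'error': '...'}.
    while True:
        info = get_task_info()
        if info['state'] == 'success':
            return info
        if info['state'] == 'error':
            # corresponds to "raise exceptions.translate_fault(task_info.error)"
            raise TaskFault(info['error'])
        print("progress is %s%%" % info.get('progress', 0))
        time.sleep(poll_interval)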
{{(pid=68443) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2444.763660] env[68443]: DEBUG oslo_concurrency.lockutils [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2444.764502] env[68443]: ERROR nova.compute.manager [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2444.764502] env[68443]: Faults: ['InvalidArgument'] [ 2444.764502] env[68443]: ERROR nova.compute.manager [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Traceback (most recent call last): [ 2444.764502] env[68443]: ERROR nova.compute.manager [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2444.764502] env[68443]: ERROR nova.compute.manager [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] yield resources [ 2444.764502] env[68443]: ERROR nova.compute.manager [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2444.764502] env[68443]: ERROR nova.compute.manager [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] self.driver.spawn(context, instance, image_meta, [ 2444.764502] env[68443]: ERROR nova.compute.manager [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2444.764502] env[68443]: ERROR nova.compute.manager [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2444.764502] env[68443]: ERROR nova.compute.manager [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2444.764502] env[68443]: ERROR nova.compute.manager [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] self._fetch_image_if_missing(context, vi) [ 2444.764502] env[68443]: ERROR nova.compute.manager [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2444.764502] env[68443]: ERROR nova.compute.manager [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] image_cache(vi, tmp_image_ds_loc) [ 2444.764936] env[68443]: ERROR nova.compute.manager [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2444.764936] env[68443]: ERROR nova.compute.manager [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] vm_util.copy_virtual_disk( [ 2444.764936] env[68443]: ERROR nova.compute.manager [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2444.764936] env[68443]: ERROR nova.compute.manager [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] session._wait_for_task(vmdk_copy_task) [ 2444.764936] env[68443]: ERROR nova.compute.manager [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 2444.764936] env[68443]: ERROR nova.compute.manager [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] return self.wait_for_task(task_ref) [ 2444.764936] env[68443]: ERROR nova.compute.manager [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2444.764936] env[68443]: ERROR nova.compute.manager [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] return evt.wait() [ 2444.764936] env[68443]: ERROR nova.compute.manager [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2444.764936] env[68443]: ERROR nova.compute.manager [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] result = hub.switch() [ 2444.764936] env[68443]: ERROR nova.compute.manager [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2444.764936] env[68443]: ERROR nova.compute.manager [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] return self.greenlet.switch() [ 2444.764936] env[68443]: ERROR nova.compute.manager [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2444.765329] env[68443]: ERROR nova.compute.manager [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] self.f(*self.args, **self.kw) [ 2444.765329] env[68443]: ERROR nova.compute.manager [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2444.765329] env[68443]: ERROR nova.compute.manager [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] raise exceptions.translate_fault(task_info.error) [ 2444.765329] env[68443]: ERROR nova.compute.manager [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2444.765329] env[68443]: ERROR nova.compute.manager [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Faults: ['InvalidArgument'] [ 2444.765329] env[68443]: ERROR nova.compute.manager [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] [ 2444.765329] env[68443]: INFO nova.compute.manager [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Terminating instance [ 2444.766112] env[68443]: DEBUG oslo_concurrency.lockutils [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2444.766330] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2444.766568] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d8af93fa-5c73-4b4b-8d08-4c6af572bad6 {{(pid=68443) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2444.768975] env[68443]: DEBUG nova.compute.manager [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2444.769178] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2444.769871] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a7faeec-72e9-433c-9cef-61368cac214c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2444.776490] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Unregistering the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2444.776698] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6050abac-2e29-4ad9-8f48-3c1f3f8fdaef {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2444.778754] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2444.778928] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68443) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2444.779845] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc8f5c85-42fd-4f75-970f-f6a05594e0ac {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2444.784620] env[68443]: DEBUG oslo_vmware.api [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Waiting for the task: (returnval){ [ 2444.784620] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]523fd18c-d697-8efc-9da9-95f192c0b93a" [ 2444.784620] env[68443]: _type = "Task" [ 2444.784620] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2444.791254] env[68443]: DEBUG oslo_vmware.api [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]523fd18c-d697-8efc-9da9-95f192c0b93a, 'name': SearchDatastore_Task} progress is 0%. 
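While the first request tears its VM down, the second build (da00322f…) takes the per-image lock on the cached vmdk, makes sure the devstack-image-cache_base folder exists, and runs a SearchDatastore_Task to see whether the image is already cached. A rough sketch of that check-before-fetch step; call_vcenter() and wait_for_task() are hypothetical helpers standing in for the session plumbing seen above:

def ensure_cached_image(call_vcenter, wait_for_task, image_id,
                        datastore="datastore1",
                        cache_dir="devstack-image-cache_base"):
    # Hypothetical helpers; the real code drives FileManager.MakeDirectory and
    # HostDatastoreBrowser.SearchDatastore_Task through the vCenter session,
    # as the entries above show.
    call_vcenter("MakeDirectory",
                 name="[%s] %s" % (datastore, cache_dir),
                 create_parents=True)
    task = call_vcenter("SearchDatastore_Task",
                        path="[%s] %s/%s" % (datastore, cache_dir, image_id),
                        pattern="%s.vmdk" % image_id)
    # An empty search result means the image must still be fetched from Glance.
    return wait_for_task(task)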
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2444.847266] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Unregistered the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2444.847495] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Deleting contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2444.847674] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Deleting the datastore file [datastore1] 75ba0bb9-0498-4434-aed1-b03aa0bcaf03 {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2444.847973] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d3c0e29f-537d-4e1f-9844-3b35b4402f04 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2444.853806] env[68443]: DEBUG oslo_vmware.api [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Waiting for the task: (returnval){ [ 2444.853806] env[68443]: value = "task-3374105" [ 2444.853806] env[68443]: _type = "Task" [ 2444.853806] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2444.862855] env[68443]: DEBUG oslo_vmware.api [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Task: {'id': task-3374105, 'name': DeleteDatastoreFile_Task} progress is 0%. 
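The teardown above follows a fixed order: unregister the VM from vCenter, then delete its directory on the datastore ("[datastore1] 75ba0bb9-…") with a FileManager delete task. A compact sketch of that ordering, again with hypothetical helpers rather than the real driver API:

def destroy_on_hypervisor(call_vcenter, wait_for_task, instance_uuid,
                          datastore="datastore1"):
    # call_vcenter()/wait_for_task() are hypothetical stand-ins; the order
    # mirrors the log: unregister first, then delete the instance directory.
    call_vcenter("UnregisterVM", vm=instance_uuid)              # "Unregistering the VM"
    task = call_vcenter("DeleteDatastoreFile_Task",
                        name="[%s] %s" % (datastore, instance_uuid))
    wait_for_task(task)                                         # "Deleted the datastore file"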
{{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2445.295311] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Preparing fetch location {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2445.295669] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Creating directory with path [datastore1] vmware_temp/cbd9721a-9cd4-457b-a3f7-360679b179cc/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2445.295724] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6035934e-bd5c-472f-9bd8-90c67f4c0b1b {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2445.306681] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Created directory with path [datastore1] vmware_temp/cbd9721a-9cd4-457b-a3f7-360679b179cc/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2445.306868] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Fetch image to [datastore1] vmware_temp/cbd9721a-9cd4-457b-a3f7-360679b179cc/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2445.307048] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to [datastore1] vmware_temp/cbd9721a-9cd4-457b-a3f7-360679b179cc/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2445.307764] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c1fbfe1-5052-458c-8abf-a2a8f196eed9 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2445.314130] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc6e59f9-2f4f-479a-87a9-630e0def6db7 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2445.322762] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67a0fc57-6ef3-4703-a1fa-129fe47f6b23 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2445.353081] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54e8e6ca-06ce-470c-a966-ee2926832fc6 {{(pid=68443) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2445.363706] env[68443]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ef233326-c595-4ac2-a3b2-4d40bc694ca4 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2445.365343] env[68443]: DEBUG oslo_vmware.api [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Task: {'id': task-3374105, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077602} completed successfully. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2445.365580] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Deleted the datastore file {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2445.365797] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Deleted contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2445.365977] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2445.366166] env[68443]: INFO nova.compute.manager [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Took 0.60 seconds to destroy the instance on the hypervisor. 
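Between the spawn traceback above and the "Aborting claim" and re-schedule entries below, the compute manager walks a fixed cleanup path for a failed build: destroy whatever reached the hypervisor, give back the resource claim, deallocate networking, and hand the request back to the scheduler. A schematic sketch of that ordering (the names here are illustrative, not Nova's actual method signatures):

class RescheduledBuild(Exception):
    pass

def build_and_run_instance(driver, claim, network, instance):
    # Illustrative control flow only; it mirrors the order of events in this log.
    try:
        driver.spawn(instance)                      # fails here with "InvalidArgument: fileType"
    except Exception as exc:
        driver.destroy(instance)                    # "Start destroying the instance on the hypervisor."
        claim.abort()                               # "Aborting claim" under the compute_resources lock
        network.deallocate_for_instance(instance)   # "Deallocating network for instance"
        raise RescheduledBuild(str(exc))            # "Build of instance ... was re-scheduled"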
[ 2445.368264] env[68443]: DEBUG nova.compute.claims [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Aborting claim: {{(pid=68443) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2445.368440] env[68443]: DEBUG oslo_concurrency.lockutils [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2445.368648] env[68443]: DEBUG oslo_concurrency.lockutils [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2445.385623] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2445.437148] env[68443]: DEBUG oslo_vmware.rw_handles [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/cbd9721a-9cd4-457b-a3f7-360679b179cc/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68443) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2445.498659] env[68443]: DEBUG oslo_vmware.rw_handles [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Completed reading data from the image iterator. {{(pid=68443) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2445.498866] env[68443]: DEBUG oslo_vmware.rw_handles [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/cbd9721a-9cd4-457b-a3f7-360679b179cc/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
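The write handle above streams the image straight to the ESX host's /folder endpoint; the URL carries the datastore path plus dcPath/dsName query parameters, and the RemoteDisconnected warning earlier in this log came from the getresponse() call made when such a handle is closed. A bare-bones standard-library sketch of that transfer (authentication, cookies and the vCenter service ticket are omitted; the path shape follows the URL in the log):

import http.client

def upload_to_datastore(host, path, data_iter, size):
    # path is of the form "/folder/vmware_temp/<dir>/<image-id>/tmp-sparse.vmdk"
    #                     "?dcPath=ha-datacenter&dsName=datastore1"
    # Certificate handling and the vCenter-issued ticket are omitted here.
    conn = http.client.HTTPSConnection(host, 443)
    conn.putrequest("PUT", path)
    conn.putheader("Content-Length", str(size))
    conn.endheaders()
    for chunk in data_iter:          # "Completed reading data from the image iterator."
        conn.send(chunk)
    try:
        # Closing the handle reads the response; a server that drops the
        # connection here produces the RemoteDisconnected warning seen earlier.
        return conn.getresponse().status
    finally:
        conn.close()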
{{(pid=68443) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2445.540126] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c11fc90f-e870-427e-baad-255d5deb6eb6 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2445.547768] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab7ebafe-bd78-43ab-aad6-a32ba670692c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2445.577771] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cbb722b-6989-4128-b1dc-cc6ad164d58c {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2445.584750] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea5f9762-f180-4c50-b0f6-aa91961b6c92 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2445.597622] env[68443]: DEBUG nova.compute.provider_tree [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2445.605666] env[68443]: DEBUG nova.scheduler.client.report [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2445.618298] env[68443]: DEBUG oslo_concurrency.lockutils [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.250s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2445.618775] env[68443]: ERROR nova.compute.manager [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2445.618775] env[68443]: Faults: ['InvalidArgument'] [ 2445.618775] env[68443]: ERROR nova.compute.manager [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Traceback (most recent call last): [ 2445.618775] env[68443]: ERROR nova.compute.manager [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2445.618775] env[68443]: ERROR nova.compute.manager [instance: 
75ba0bb9-0498-4434-aed1-b03aa0bcaf03] self.driver.spawn(context, instance, image_meta, [ 2445.618775] env[68443]: ERROR nova.compute.manager [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2445.618775] env[68443]: ERROR nova.compute.manager [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2445.618775] env[68443]: ERROR nova.compute.manager [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2445.618775] env[68443]: ERROR nova.compute.manager [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] self._fetch_image_if_missing(context, vi) [ 2445.618775] env[68443]: ERROR nova.compute.manager [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2445.618775] env[68443]: ERROR nova.compute.manager [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] image_cache(vi, tmp_image_ds_loc) [ 2445.618775] env[68443]: ERROR nova.compute.manager [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2445.619134] env[68443]: ERROR nova.compute.manager [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] vm_util.copy_virtual_disk( [ 2445.619134] env[68443]: ERROR nova.compute.manager [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2445.619134] env[68443]: ERROR nova.compute.manager [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] session._wait_for_task(vmdk_copy_task) [ 2445.619134] env[68443]: ERROR nova.compute.manager [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2445.619134] env[68443]: ERROR nova.compute.manager [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] return self.wait_for_task(task_ref) [ 2445.619134] env[68443]: ERROR nova.compute.manager [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2445.619134] env[68443]: ERROR nova.compute.manager [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] return evt.wait() [ 2445.619134] env[68443]: ERROR nova.compute.manager [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2445.619134] env[68443]: ERROR nova.compute.manager [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] result = hub.switch() [ 2445.619134] env[68443]: ERROR nova.compute.manager [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2445.619134] env[68443]: ERROR nova.compute.manager [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] return self.greenlet.switch() [ 2445.619134] env[68443]: ERROR nova.compute.manager [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2445.619134] env[68443]: ERROR nova.compute.manager [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] self.f(*self.args, **self.kw) [ 2445.619562] env[68443]: ERROR nova.compute.manager [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2445.619562] env[68443]: ERROR nova.compute.manager [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] raise exceptions.translate_fault(task_info.error) [ 2445.619562] env[68443]: ERROR nova.compute.manager [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2445.619562] env[68443]: ERROR nova.compute.manager [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Faults: ['InvalidArgument'] [ 2445.619562] env[68443]: ERROR nova.compute.manager [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] [ 2445.619562] env[68443]: DEBUG nova.compute.utils [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] VimFaultException {{(pid=68443) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2445.620855] env[68443]: DEBUG nova.compute.manager [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Build of instance 75ba0bb9-0498-4434-aed1-b03aa0bcaf03 was re-scheduled: A specified parameter was not correct: fileType [ 2445.620855] env[68443]: Faults: ['InvalidArgument'] {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2445.621264] env[68443]: DEBUG nova.compute.manager [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Unplugging VIFs for instance {{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2445.621432] env[68443]: DEBUG nova.compute.manager [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2445.621602] env[68443]: DEBUG nova.compute.manager [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2445.621764] env[68443]: DEBUG nova.network.neutron [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2445.925598] env[68443]: DEBUG nova.network.neutron [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2445.938937] env[68443]: INFO nova.compute.manager [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Took 0.32 seconds to deallocate network for instance. [ 2446.029664] env[68443]: INFO nova.scheduler.client.report [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Deleted allocations for instance 75ba0bb9-0498-4434-aed1-b03aa0bcaf03 [ 2446.051453] env[68443]: DEBUG oslo_concurrency.lockutils [None req-e69673ea-8cee-4f9e-8760-f32043b42db6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Lock "75ba0bb9-0498-4434-aed1-b03aa0bcaf03" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 593.480s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2446.051765] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c60d1c60-244b-4aa8-b21a-7026b2ec07e6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Lock "75ba0bb9-0498-4434-aed1-b03aa0bcaf03" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 397.124s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2446.051994] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c60d1c60-244b-4aa8-b21a-7026b2ec07e6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Acquiring lock "75ba0bb9-0498-4434-aed1-b03aa0bcaf03-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2446.052228] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c60d1c60-244b-4aa8-b21a-7026b2ec07e6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Lock "75ba0bb9-0498-4434-aed1-b03aa0bcaf03-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2446.052400] env[68443]: 
DEBUG oslo_concurrency.lockutils [None req-c60d1c60-244b-4aa8-b21a-7026b2ec07e6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Lock "75ba0bb9-0498-4434-aed1-b03aa0bcaf03-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2446.054718] env[68443]: INFO nova.compute.manager [None req-c60d1c60-244b-4aa8-b21a-7026b2ec07e6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Terminating instance [ 2446.057948] env[68443]: DEBUG nova.compute.manager [None req-c60d1c60-244b-4aa8-b21a-7026b2ec07e6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2446.057948] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-c60d1c60-244b-4aa8-b21a-7026b2ec07e6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2446.057948] env[68443]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-beda554e-405b-4cda-902b-83dd0c953ff5 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2446.066552] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4a96e3a-974f-4d6d-ac7b-ae688b0e3a98 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2446.093130] env[68443]: WARNING nova.virt.vmwareapi.vmops [None req-c60d1c60-244b-4aa8-b21a-7026b2ec07e6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 75ba0bb9-0498-4434-aed1-b03aa0bcaf03 could not be found. [ 2446.093298] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-c60d1c60-244b-4aa8-b21a-7026b2ec07e6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2446.093495] env[68443]: INFO nova.compute.manager [None req-c60d1c60-244b-4aa8-b21a-7026b2ec07e6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2446.093750] env[68443]: DEBUG oslo.service.loopingcall [None req-c60d1c60-244b-4aa8-b21a-7026b2ec07e6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
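The wait and hold times above (the build lock on 75ba0bb9… held for 593.480s while the terminate request waited 397.124s for it, then the -events lock held for 0.000s) come from the lockutils wrapper, which timestamps acquisition and release of every named lock. A small stand-alone imitation of that instrumentation, not the oslo.concurrency implementation itself:

import threading
import time
from contextlib import contextmanager

_locks = {}

@contextmanager
def timed_lock(name):
    # Minimal imitation of the "waited ... / held ..." messages in this log.
    lock = _locks.setdefault(name, threading.Lock())
    start = time.monotonic()
    lock.acquire()
    acquired = time.monotonic()
    print('Lock "%s" acquired :: waited %.3fs' % (name, acquired - start))
    try:
        yield
    finally:
        lock.release()
        print('Lock "%s" "released" :: held %.3fs' % (name, time.monotonic() - acquired))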
{{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2446.094216] env[68443]: DEBUG nova.compute.manager [-] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2446.094317] env[68443]: DEBUG nova.network.neutron [-] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2446.126052] env[68443]: DEBUG nova.network.neutron [-] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2446.135129] env[68443]: INFO nova.compute.manager [-] [instance: 75ba0bb9-0498-4434-aed1-b03aa0bcaf03] Took 0.04 seconds to deallocate network for instance. [ 2446.227437] env[68443]: DEBUG oslo_concurrency.lockutils [None req-c60d1c60-244b-4aa8-b21a-7026b2ec07e6 tempest-ServersTestJSON-140288033 tempest-ServersTestJSON-140288033-project-member] Lock "75ba0bb9-0498-4434-aed1-b03aa0bcaf03" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.176s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2476.062823] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2476.063367] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Getting list of instances from cluster (obj){ [ 2476.063367] env[68443]: value = "domain-c8" [ 2476.063367] env[68443]: _type = "ClusterComputeResource" [ 2476.063367] env[68443]: } {{(pid=68443) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 2476.064445] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f23f0382-6883-4825-9e72-85bda990c780 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2476.078153] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Got total of 5 instances {{(pid=68443) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 2477.857800] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2480.825755] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2480.837922] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2480.838160] env[68443]: DEBUG 
oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2480.838331] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2480.838484] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68443) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2480.839624] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-166d7005-a9fc-4506-b6af-417d32b5c9ca {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2480.848323] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da9a5cce-f154-4a48-a26a-88bea06fdeef {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2480.864182] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-702d86fa-389f-416e-ba91-9d94d6f14fbc {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2480.870304] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0437b14f-f235-4918-b7ab-e4516a4f10b3 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2480.902485] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180987MB free_disk=105GB free_vcpus=48 pci_devices=None {{(pid=68443) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2480.902643] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2480.902842] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2481.039860] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance da00322f-5482-4511-94a4-2e2f3705fb99 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
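The audit now tracks five instances (75ba0bb9… is gone), and the inventory refreshed just below carries the allocation ratios that turn raw capacity into schedulable capacity. A worked example using placement's usual rule, capacity = (total - reserved) * allocation_ratio (stated here as the standard formula, not quoted from this log):

# Capacity implied by the refreshed inventory below.  max_unit (16 VCPU,
# 65530 MB, 105 GB) additionally caps what a single allocation may request.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0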
{{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2481.040038] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 83198ee3-dbb4-4088-b889-1aa9196f0b92 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2481.040174] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance 434a8f2e-7c08-4b16-b255-45b168679f49 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2481.040298] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance d19509cf-7828-4e55-bf2b-4c57b9eab217 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2481.040460] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Instance f3c8ee5f-a97a-46a4-82e4-ec1c55ff1114 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68443) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2481.040659] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2481.040798] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1152MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=68443) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2481.056606] env[68443]: DEBUG nova.scheduler.client.report [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Refreshing inventories for resource provider feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2481.070492] env[68443]: DEBUG nova.scheduler.client.report [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Updating ProviderTree inventory for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2481.070682] env[68443]: DEBUG nova.compute.provider_tree [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Updating inventory in ProviderTree for provider 
feda0f0b-e324-4b78-af74-5e6cfd355a37 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2481.081637] env[68443]: DEBUG nova.scheduler.client.report [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Refreshing aggregate associations for resource provider feda0f0b-e324-4b78-af74-5e6cfd355a37, aggregates: None {{(pid=68443) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2481.101708] env[68443]: DEBUG nova.scheduler.client.report [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Refreshing trait associations for resource provider feda0f0b-e324-4b78-af74-5e6cfd355a37, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=68443) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2481.165358] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eafc92a-a7ac-4d80-9abd-46e9a54c1444 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2481.172985] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3163fccb-b491-4b72-9cea-86748b2007c9 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2481.202795] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73bdab92-c623-4f70-bccb-e84860eb1cae {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2481.210052] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-670bc30a-6612-4420-921c-d08e5d83544b {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2481.224215] env[68443]: DEBUG nova.compute.provider_tree [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2481.232402] env[68443]: DEBUG nova.scheduler.client.report [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2481.245222] env[68443]: DEBUG nova.compute.resource_tracker [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68443) _update_available_resource 
/opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2481.245389] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.343s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2482.245596] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2482.825462] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2482.825646] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68443) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 2483.824632] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2483.824921] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Cleaning up deleted instances {{(pid=68443) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11219}} [ 2483.834753] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] There are 0 instances to clean {{(pid=68443) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 2484.835746] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2484.836130] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Starting heal instance info cache {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 2484.836130] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Rebuilding the list of instances to heal {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 2484.851304] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2484.851474] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 83198ee3-dbb4-4088-b889-1aa9196f0b92] Skipping network cache update for instance because it is Building. 
{{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2484.851608] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: 434a8f2e-7c08-4b16-b255-45b168679f49] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2484.851739] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: d19509cf-7828-4e55-bf2b-4c57b9eab217] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2484.851862] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] [instance: f3c8ee5f-a97a-46a4-82e4-ec1c55ff1114] Skipping network cache update for instance because it is Building. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2484.851984] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Didn't find any instances for network info cache update. {{(pid=68443) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 2485.837060] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2486.503507] env[68443]: DEBUG oslo_concurrency.lockutils [None req-6bda6c06-13a8-4a53-a1cf-c6403bc3a59b tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Acquiring lock "f3c8ee5f-a97a-46a4-82e4-ec1c55ff1114" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2488.824631] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2490.825401] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2490.825791] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2492.826045] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2492.826361] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Cleaning up deleted instances with incomplete migration {{(pid=68443) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11257}} [ 2494.263506] env[68443]: WARNING oslo_vmware.rw_handles 
[None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2494.263506] env[68443]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2494.263506] env[68443]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2494.263506] env[68443]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2494.263506] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2494.263506] env[68443]: ERROR oslo_vmware.rw_handles response.begin() [ 2494.263506] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2494.263506] env[68443]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2494.263506] env[68443]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2494.263506] env[68443]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2494.263506] env[68443]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2494.263506] env[68443]: ERROR oslo_vmware.rw_handles [ 2494.264359] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Downloaded image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to vmware_temp/cbd9721a-9cd4-457b-a3f7-360679b179cc/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2494.265794] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Caching image {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2494.266056] env[68443]: DEBUG nova.virt.vmwareapi.vm_util [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Copying Virtual Disk [datastore1] vmware_temp/cbd9721a-9cd4-457b-a3f7-360679b179cc/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk to [datastore1] vmware_temp/cbd9721a-9cd4-457b-a3f7-360679b179cc/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk {{(pid=68443) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2494.266344] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a5e10c59-f94e-4755-9595-c50dd4e013ba {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2494.274093] env[68443]: DEBUG oslo_vmware.api [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Waiting for the task: (returnval){ [ 2494.274093] env[68443]: value = "task-3374106" [ 2494.274093] env[68443]: _type = "Task" [ 2494.274093] env[68443]: } to complete. 
{{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2494.282446] env[68443]: DEBUG oslo_vmware.api [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Task: {'id': task-3374106, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2494.784262] env[68443]: DEBUG oslo_vmware.exceptions [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Fault InvalidArgument not matched. {{(pid=68443) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2494.784548] env[68443]: DEBUG oslo_concurrency.lockutils [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2494.785191] env[68443]: ERROR nova.compute.manager [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2494.785191] env[68443]: Faults: ['InvalidArgument'] [ 2494.785191] env[68443]: ERROR nova.compute.manager [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Traceback (most recent call last): [ 2494.785191] env[68443]: ERROR nova.compute.manager [instance: da00322f-5482-4511-94a4-2e2f3705fb99] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2494.785191] env[68443]: ERROR nova.compute.manager [instance: da00322f-5482-4511-94a4-2e2f3705fb99] yield resources [ 2494.785191] env[68443]: ERROR nova.compute.manager [instance: da00322f-5482-4511-94a4-2e2f3705fb99] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2494.785191] env[68443]: ERROR nova.compute.manager [instance: da00322f-5482-4511-94a4-2e2f3705fb99] self.driver.spawn(context, instance, image_meta, [ 2494.785191] env[68443]: ERROR nova.compute.manager [instance: da00322f-5482-4511-94a4-2e2f3705fb99] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2494.785191] env[68443]: ERROR nova.compute.manager [instance: da00322f-5482-4511-94a4-2e2f3705fb99] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2494.785191] env[68443]: ERROR nova.compute.manager [instance: da00322f-5482-4511-94a4-2e2f3705fb99] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2494.785191] env[68443]: ERROR nova.compute.manager [instance: da00322f-5482-4511-94a4-2e2f3705fb99] self._fetch_image_if_missing(context, vi) [ 2494.785191] env[68443]: ERROR nova.compute.manager [instance: da00322f-5482-4511-94a4-2e2f3705fb99] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2494.785520] env[68443]: ERROR nova.compute.manager [instance: da00322f-5482-4511-94a4-2e2f3705fb99] image_cache(vi, tmp_image_ds_loc) [ 2494.785520] env[68443]: ERROR nova.compute.manager [instance: da00322f-5482-4511-94a4-2e2f3705fb99] 
File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2494.785520] env[68443]: ERROR nova.compute.manager [instance: da00322f-5482-4511-94a4-2e2f3705fb99] vm_util.copy_virtual_disk( [ 2494.785520] env[68443]: ERROR nova.compute.manager [instance: da00322f-5482-4511-94a4-2e2f3705fb99] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2494.785520] env[68443]: ERROR nova.compute.manager [instance: da00322f-5482-4511-94a4-2e2f3705fb99] session._wait_for_task(vmdk_copy_task) [ 2494.785520] env[68443]: ERROR nova.compute.manager [instance: da00322f-5482-4511-94a4-2e2f3705fb99] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2494.785520] env[68443]: ERROR nova.compute.manager [instance: da00322f-5482-4511-94a4-2e2f3705fb99] return self.wait_for_task(task_ref) [ 2494.785520] env[68443]: ERROR nova.compute.manager [instance: da00322f-5482-4511-94a4-2e2f3705fb99] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2494.785520] env[68443]: ERROR nova.compute.manager [instance: da00322f-5482-4511-94a4-2e2f3705fb99] return evt.wait() [ 2494.785520] env[68443]: ERROR nova.compute.manager [instance: da00322f-5482-4511-94a4-2e2f3705fb99] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2494.785520] env[68443]: ERROR nova.compute.manager [instance: da00322f-5482-4511-94a4-2e2f3705fb99] result = hub.switch() [ 2494.785520] env[68443]: ERROR nova.compute.manager [instance: da00322f-5482-4511-94a4-2e2f3705fb99] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2494.785520] env[68443]: ERROR nova.compute.manager [instance: da00322f-5482-4511-94a4-2e2f3705fb99] return self.greenlet.switch() [ 2494.786153] env[68443]: ERROR nova.compute.manager [instance: da00322f-5482-4511-94a4-2e2f3705fb99] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2494.786153] env[68443]: ERROR nova.compute.manager [instance: da00322f-5482-4511-94a4-2e2f3705fb99] self.f(*self.args, **self.kw) [ 2494.786153] env[68443]: ERROR nova.compute.manager [instance: da00322f-5482-4511-94a4-2e2f3705fb99] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2494.786153] env[68443]: ERROR nova.compute.manager [instance: da00322f-5482-4511-94a4-2e2f3705fb99] raise exceptions.translate_fault(task_info.error) [ 2494.786153] env[68443]: ERROR nova.compute.manager [instance: da00322f-5482-4511-94a4-2e2f3705fb99] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2494.786153] env[68443]: ERROR nova.compute.manager [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Faults: ['InvalidArgument'] [ 2494.786153] env[68443]: ERROR nova.compute.manager [instance: da00322f-5482-4511-94a4-2e2f3705fb99] [ 2494.786153] env[68443]: INFO nova.compute.manager [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Terminating instance [ 2494.787138] env[68443]: DEBUG oslo_concurrency.lockutils [None req-8cd5fb6d-8d8a-42fc-9883-9e0072106262 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/a80a8b97-4d56-4702-9b02-9d115fcd6710/a80a8b97-4d56-4702-9b02-9d115fcd6710.vmdk" {{(pid=68443) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2494.787349] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-8cd5fb6d-8d8a-42fc-9883-9e0072106262 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2494.787606] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4c94b54d-e8aa-4908-b78a-5a46a1cf2981 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2494.789842] env[68443]: DEBUG nova.compute.manager [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2494.790040] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2494.790797] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a9ab372-9797-4645-8b7e-f1c83ef75f22 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2494.797791] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Unregistering the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2494.798043] env[68443]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-81831f73-dad1-4197-91bd-7d0a85dafbb0 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2494.800473] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-8cd5fb6d-8d8a-42fc-9883-9e0072106262 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2494.800682] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-8cd5fb6d-8d8a-42fc-9883-9e0072106262 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68443) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2494.801694] env[68443]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-28cecf56-7c67-452b-810f-97e8ea75f2f5 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2494.806447] env[68443]: DEBUG oslo_vmware.api [None req-8cd5fb6d-8d8a-42fc-9883-9e0072106262 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Waiting for the task: (returnval){ [ 2494.806447] env[68443]: value = "session[52c27adc-de12-9155-bd91-16aa298f9564]5274ab8a-dbdf-7da8-e948-75853b01b626" [ 2494.806447] env[68443]: _type = "Task" [ 2494.806447] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2494.818317] env[68443]: DEBUG oslo_vmware.api [None req-8cd5fb6d-8d8a-42fc-9883-9e0072106262 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Task: {'id': session[52c27adc-de12-9155-bd91-16aa298f9564]5274ab8a-dbdf-7da8-e948-75853b01b626, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2494.824782] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2494.841469] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2494.874608] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Unregistered the VM {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2494.874821] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Deleting contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2494.874940] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Deleting the datastore file [datastore1] da00322f-5482-4511-94a4-2e2f3705fb99 {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2494.875396] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f6aa92fb-9c1c-4daa-8443-01c3bbe0d193 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2494.881490] env[68443]: DEBUG oslo_vmware.api [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Waiting for the 
task: (returnval){ [ 2494.881490] env[68443]: value = "task-3374108" [ 2494.881490] env[68443]: _type = "Task" [ 2494.881490] env[68443]: } to complete. {{(pid=68443) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2494.889148] env[68443]: DEBUG oslo_vmware.api [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Task: {'id': task-3374108, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2495.317271] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-8cd5fb6d-8d8a-42fc-9883-9e0072106262 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 83198ee3-dbb4-4088-b889-1aa9196f0b92] Preparing fetch location {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2495.317636] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-8cd5fb6d-8d8a-42fc-9883-9e0072106262 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Creating directory with path [datastore1] vmware_temp/dab6db0b-5e52-44b8-99ec-687993346b93/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2495.317762] env[68443]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-44da161a-6db5-406d-ade8-afd72f7d31ae {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2495.328295] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-8cd5fb6d-8d8a-42fc-9883-9e0072106262 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Created directory with path [datastore1] vmware_temp/dab6db0b-5e52-44b8-99ec-687993346b93/a80a8b97-4d56-4702-9b02-9d115fcd6710 {{(pid=68443) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2495.328474] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-8cd5fb6d-8d8a-42fc-9883-9e0072106262 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 83198ee3-dbb4-4088-b889-1aa9196f0b92] Fetch image to [datastore1] vmware_temp/dab6db0b-5e52-44b8-99ec-687993346b93/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk {{(pid=68443) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2495.328640] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-8cd5fb6d-8d8a-42fc-9883-9e0072106262 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 83198ee3-dbb4-4088-b889-1aa9196f0b92] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to [datastore1] vmware_temp/dab6db0b-5e52-44b8-99ec-687993346b93/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk on the data store datastore1 {{(pid=68443) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2495.329402] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8b06f6d-ae3c-47f8-b6f1-2ee6b07ef89e {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2495.335452] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25ba3b4f-ab80-4543-8689-aa0cbd352e27 
{{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2495.344132] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17849b14-986e-4890-8e5e-80862dbd0a09 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2495.374016] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8378e264-f7f3-4ebb-929e-294fec0deb07 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2495.379119] env[68443]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-246a96ae-6840-4d52-9ec5-ff59e68c20ca {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2495.389639] env[68443]: DEBUG oslo_vmware.api [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Task: {'id': task-3374108, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.075519} completed successfully. {{(pid=68443) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2495.389902] env[68443]: DEBUG nova.virt.vmwareapi.ds_util [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Deleted the datastore file {{(pid=68443) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2495.390099] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Deleted contents of the VM from datastore datastore1 {{(pid=68443) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2495.390276] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2495.390446] env[68443]: INFO nova.compute.manager [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Took 0.60 seconds to destroy the instance on the hypervisor. 
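[editor's note] The DeleteDatastoreFile_Task entries above (task-3374108, wait_for_task, progress polling) follow oslo.vmware's generic "invoke a *_Task SOAP method, then block on wait_for_task()" pattern. Below is a minimal sketch of that pattern only, not Nova's ds_util code: the vCenter host, credentials, datastore path, and the missing datacenter reference are placeholders, and a real call would pass an actual Datacenter managed-object reference.

```python
# Minimal sketch of the oslo.vmware invoke-then-wait pattern seen in the log
# above. Host, credentials, and the datastore path are placeholders; they are
# not values taken from this log.
from oslo_vmware import api, exceptions

session = api.VMwareAPISession(
    'vcenter.example.org', 'user', 'password',
    api_retry_count=10, task_poll_interval=0.5)

file_manager = session.vim.service_content.fileManager
datacenter_ref = None  # a Datacenter moref in real use; placeholder here

try:
    # Invoking a *_Task method returns a task moref immediately...
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', file_manager,
        name='[datastore1] some-instance-uuid',  # placeholder datastore path
        datacenter=datacenter_ref)
    # ...and wait_for_task() polls it until it succeeds or faults, which is
    # what produces the "Task: {...} progress is 0%" entries above.
    session.wait_for_task(task)
except exceptions.VimFaultException as exc:
    # A failed task is translated into VimFaultException; this is the same
    # path by which the InvalidArgument/fileType fault reached Nova earlier.
    print('task failed: %s (faults: %s)' % (exc, exc.fault_list))
```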
[ 2495.392492] env[68443]: DEBUG nova.compute.claims [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Aborting claim: {{(pid=68443) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2495.392664] env[68443]: DEBUG oslo_concurrency.lockutils [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2495.392898] env[68443]: DEBUG oslo_concurrency.lockutils [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2495.399579] env[68443]: DEBUG nova.virt.vmwareapi.images [None req-8cd5fb6d-8d8a-42fc-9883-9e0072106262 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] [instance: 83198ee3-dbb4-4088-b889-1aa9196f0b92] Downloading image file data a80a8b97-4d56-4702-9b02-9d115fcd6710 to the data store datastore1 {{(pid=68443) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2495.448759] env[68443]: DEBUG oslo_vmware.rw_handles [None req-8cd5fb6d-8d8a-42fc-9883-9e0072106262 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/dab6db0b-5e52-44b8-99ec-687993346b93/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68443) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2495.508795] env[68443]: DEBUG oslo_vmware.rw_handles [None req-8cd5fb6d-8d8a-42fc-9883-9e0072106262 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Completed reading data from the image iterator. {{(pid=68443) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2495.508977] env[68443]: DEBUG oslo_vmware.rw_handles [None req-8cd5fb6d-8d8a-42fc-9883-9e0072106262 tempest-ServerDiskConfigTestJSON-1343212998 tempest-ServerDiskConfigTestJSON-1343212998-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/dab6db0b-5e52-44b8-99ec-687993346b93/a80a8b97-4d56-4702-9b02-9d115fcd6710/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68443) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2495.550271] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-652b25cf-91a5-4f9f-88ef-9014d3f72161 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2495.557953] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33423acb-d395-4b2e-b5d0-e658585e68d6 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2495.588149] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d0a58a1-d175-4d61-a21f-bc4286b44f5e {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2495.594728] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cc52ee2-423b-4a05-8e20-aaebe0f74f04 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2495.607767] env[68443]: DEBUG nova.compute.provider_tree [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Inventory has not changed in ProviderTree for provider: feda0f0b-e324-4b78-af74-5e6cfd355a37 {{(pid=68443) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2495.615954] env[68443]: DEBUG nova.scheduler.client.report [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Inventory has not changed for provider feda0f0b-e324-4b78-af74-5e6cfd355a37 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 105, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68443) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2495.629748] env[68443]: DEBUG oslo_concurrency.lockutils [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.237s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2495.630325] env[68443]: ERROR nova.compute.manager [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2495.630325] env[68443]: Faults: ['InvalidArgument'] [ 2495.630325] env[68443]: ERROR nova.compute.manager [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Traceback (most recent call last): [ 2495.630325] env[68443]: ERROR nova.compute.manager [instance: da00322f-5482-4511-94a4-2e2f3705fb99] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2495.630325] env[68443]: ERROR 
nova.compute.manager [instance: da00322f-5482-4511-94a4-2e2f3705fb99] self.driver.spawn(context, instance, image_meta, [ 2495.630325] env[68443]: ERROR nova.compute.manager [instance: da00322f-5482-4511-94a4-2e2f3705fb99] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2495.630325] env[68443]: ERROR nova.compute.manager [instance: da00322f-5482-4511-94a4-2e2f3705fb99] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2495.630325] env[68443]: ERROR nova.compute.manager [instance: da00322f-5482-4511-94a4-2e2f3705fb99] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2495.630325] env[68443]: ERROR nova.compute.manager [instance: da00322f-5482-4511-94a4-2e2f3705fb99] self._fetch_image_if_missing(context, vi) [ 2495.630325] env[68443]: ERROR nova.compute.manager [instance: da00322f-5482-4511-94a4-2e2f3705fb99] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2495.630325] env[68443]: ERROR nova.compute.manager [instance: da00322f-5482-4511-94a4-2e2f3705fb99] image_cache(vi, tmp_image_ds_loc) [ 2495.630325] env[68443]: ERROR nova.compute.manager [instance: da00322f-5482-4511-94a4-2e2f3705fb99] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2495.630651] env[68443]: ERROR nova.compute.manager [instance: da00322f-5482-4511-94a4-2e2f3705fb99] vm_util.copy_virtual_disk( [ 2495.630651] env[68443]: ERROR nova.compute.manager [instance: da00322f-5482-4511-94a4-2e2f3705fb99] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2495.630651] env[68443]: ERROR nova.compute.manager [instance: da00322f-5482-4511-94a4-2e2f3705fb99] session._wait_for_task(vmdk_copy_task) [ 2495.630651] env[68443]: ERROR nova.compute.manager [instance: da00322f-5482-4511-94a4-2e2f3705fb99] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2495.630651] env[68443]: ERROR nova.compute.manager [instance: da00322f-5482-4511-94a4-2e2f3705fb99] return self.wait_for_task(task_ref) [ 2495.630651] env[68443]: ERROR nova.compute.manager [instance: da00322f-5482-4511-94a4-2e2f3705fb99] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2495.630651] env[68443]: ERROR nova.compute.manager [instance: da00322f-5482-4511-94a4-2e2f3705fb99] return evt.wait() [ 2495.630651] env[68443]: ERROR nova.compute.manager [instance: da00322f-5482-4511-94a4-2e2f3705fb99] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2495.630651] env[68443]: ERROR nova.compute.manager [instance: da00322f-5482-4511-94a4-2e2f3705fb99] result = hub.switch() [ 2495.630651] env[68443]: ERROR nova.compute.manager [instance: da00322f-5482-4511-94a4-2e2f3705fb99] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2495.630651] env[68443]: ERROR nova.compute.manager [instance: da00322f-5482-4511-94a4-2e2f3705fb99] return self.greenlet.switch() [ 2495.630651] env[68443]: ERROR nova.compute.manager [instance: da00322f-5482-4511-94a4-2e2f3705fb99] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2495.630651] env[68443]: ERROR nova.compute.manager [instance: da00322f-5482-4511-94a4-2e2f3705fb99] self.f(*self.args, **self.kw) [ 2495.630963] env[68443]: ERROR nova.compute.manager [instance: da00322f-5482-4511-94a4-2e2f3705fb99] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2495.630963] env[68443]: ERROR nova.compute.manager [instance: da00322f-5482-4511-94a4-2e2f3705fb99] raise exceptions.translate_fault(task_info.error) [ 2495.630963] env[68443]: ERROR nova.compute.manager [instance: da00322f-5482-4511-94a4-2e2f3705fb99] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2495.630963] env[68443]: ERROR nova.compute.manager [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Faults: ['InvalidArgument'] [ 2495.630963] env[68443]: ERROR nova.compute.manager [instance: da00322f-5482-4511-94a4-2e2f3705fb99] [ 2495.631347] env[68443]: DEBUG nova.compute.utils [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] VimFaultException {{(pid=68443) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2495.632628] env[68443]: DEBUG nova.compute.manager [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Build of instance da00322f-5482-4511-94a4-2e2f3705fb99 was re-scheduled: A specified parameter was not correct: fileType [ 2495.632628] env[68443]: Faults: ['InvalidArgument'] {{(pid=68443) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2495.633009] env[68443]: DEBUG nova.compute.manager [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Unplugging VIFs for instance {{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2495.633198] env[68443]: DEBUG nova.compute.manager [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68443) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2495.633371] env[68443]: DEBUG nova.compute.manager [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2495.633537] env[68443]: DEBUG nova.network.neutron [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2495.962055] env[68443]: DEBUG nova.network.neutron [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2495.976034] env[68443]: INFO nova.compute.manager [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Took 0.34 seconds to deallocate network for instance. [ 2496.065633] env[68443]: INFO nova.scheduler.client.report [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Deleted allocations for instance da00322f-5482-4511-94a4-2e2f3705fb99 [ 2496.095242] env[68443]: DEBUG oslo_concurrency.lockutils [None req-27a8dc0d-0e86-484c-bd2b-b12ab353f6ec tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Lock "da00322f-5482-4511-94a4-2e2f3705fb99" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 598.704s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2496.095242] env[68443]: DEBUG oslo_concurrency.lockutils [None req-169bbfb3-bbb1-4062-8f9e-a459bd191040 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Lock "da00322f-5482-4511-94a4-2e2f3705fb99" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 402.745s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2496.095242] env[68443]: DEBUG oslo_concurrency.lockutils [None req-169bbfb3-bbb1-4062-8f9e-a459bd191040 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Acquiring lock "da00322f-5482-4511-94a4-2e2f3705fb99-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2496.095454] env[68443]: DEBUG oslo_concurrency.lockutils [None req-169bbfb3-bbb1-4062-8f9e-a459bd191040 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Lock "da00322f-5482-4511-94a4-2e2f3705fb99-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68443) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2496.095454] env[68443]: DEBUG oslo_concurrency.lockutils [None req-169bbfb3-bbb1-4062-8f9e-a459bd191040 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Lock "da00322f-5482-4511-94a4-2e2f3705fb99-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2496.096631] env[68443]: INFO nova.compute.manager [None req-169bbfb3-bbb1-4062-8f9e-a459bd191040 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Terminating instance [ 2496.098409] env[68443]: DEBUG nova.compute.manager [None req-169bbfb3-bbb1-4062-8f9e-a459bd191040 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Start destroying the instance on the hypervisor. {{(pid=68443) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2496.098409] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-169bbfb3-bbb1-4062-8f9e-a459bd191040 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Destroying instance {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2496.098790] env[68443]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2a22ec83-e442-48f2-81cc-cacf7ea82346 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2496.108425] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b8db2e0-adef-4569-875a-92808d2ecad2 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2496.134918] env[68443]: WARNING nova.virt.vmwareapi.vmops [None req-169bbfb3-bbb1-4062-8f9e-a459bd191040 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance da00322f-5482-4511-94a4-2e2f3705fb99 could not be found. [ 2496.135125] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-169bbfb3-bbb1-4062-8f9e-a459bd191040 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Instance destroyed {{(pid=68443) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2496.135316] env[68443]: INFO nova.compute.manager [None req-169bbfb3-bbb1-4062-8f9e-a459bd191040 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2496.135559] env[68443]: DEBUG oslo.service.loopingcall [None req-169bbfb3-bbb1-4062-8f9e-a459bd191040 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68443) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2496.136045] env[68443]: DEBUG nova.compute.manager [-] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Deallocating network for instance {{(pid=68443) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2496.136150] env[68443]: DEBUG nova.network.neutron [-] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] deallocate_for_instance() {{(pid=68443) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2496.158518] env[68443]: DEBUG nova.network.neutron [-] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Updating instance_info_cache with network_info: [] {{(pid=68443) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2496.166689] env[68443]: INFO nova.compute.manager [-] [instance: da00322f-5482-4511-94a4-2e2f3705fb99] Took 0.03 seconds to deallocate network for instance. [ 2496.258388] env[68443]: DEBUG oslo_concurrency.lockutils [None req-169bbfb3-bbb1-4062-8f9e-a459bd191040 tempest-DeleteServersTestJSON-507336290 tempest-DeleteServersTestJSON-507336290-project-member] Lock "da00322f-5482-4511-94a4-2e2f3705fb99" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.164s {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2520.068369] env[68443]: DEBUG oslo_service.periodic_task [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Running periodic task ComputeManager._sync_power_states {{(pid=68443) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2520.084891] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Getting list of instances from cluster (obj){ [ 2520.084891] env[68443]: value = "domain-c8" [ 2520.084891] env[68443]: _type = "ClusterComputeResource" [ 2520.084891] env[68443]: } {{(pid=68443) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 2520.086590] env[68443]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eea0148a-d8cb-4b7d-bed1-b492d840aab6 {{(pid=68443) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2520.101070] env[68443]: DEBUG nova.virt.vmwareapi.vmops [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Got total of 4 instances {{(pid=68443) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 2520.101259] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Triggering sync for uuid 83198ee3-dbb4-4088-b889-1aa9196f0b92 {{(pid=68443) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 2520.101458] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Triggering sync for uuid 434a8f2e-7c08-4b16-b255-45b168679f49 {{(pid=68443) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 2520.101622] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Triggering sync for uuid d19509cf-7828-4e55-bf2b-4c57b9eab217 {{(pid=68443) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 2520.101792] env[68443]: DEBUG nova.compute.manager [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Triggering sync for uuid f3c8ee5f-a97a-46a4-82e4-ec1c55ff1114 {{(pid=68443) 
_sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 2520.102108] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "83198ee3-dbb4-4088-b889-1aa9196f0b92" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2520.102350] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "434a8f2e-7c08-4b16-b255-45b168679f49" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2520.102584] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "d19509cf-7828-4e55-bf2b-4c57b9eab217" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2520.102808] env[68443]: DEBUG oslo_concurrency.lockutils [None req-4f9a9785-288a-4c6a-bbe9-0ac1044ac068 None None] Acquiring lock "f3c8ee5f-a97a-46a4-82e4-ec1c55ff1114" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68443) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
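[editor's note] The _sync_power_states entries above acquire one lock per instance UUID ("83198ee3-...", "434a8f2e-...", and so on) before querying the driver. A minimal sketch of that per-instance locking pattern with oslo.concurrency follows; it is not Nova's code, and the query function is a placeholder. Keying the lock on the UUID serializes concurrent syncs of the same instance while letting different instances proceed in parallel.

```python
# A minimal sketch of the per-instance lock pattern shown by the
# _sync_power_states log entries above, using oslo.concurrency.
from oslo_concurrency import lockutils


def sync_power_states(instance_uuids, query_power_state):
    for uuid in instance_uuids:
        # synchronized() keyed on the UUID mirrors the lock names in the log,
        # e.g. Acquiring lock "83198ee3-..." by "..._sync..query_driver_...".
        # Decorating an inner function per instance matches that shape.
        @lockutils.synchronized(uuid)
        def _query_and_sync(uuid=uuid):
            return query_power_state(uuid)

        _query_and_sync()


if __name__ == '__main__':
    # Placeholder usage: one UUID from the log, a stubbed power-state query.
    sync_power_states(
        ['83198ee3-dbb4-4088-b889-1aa9196f0b92'],
        lambda uuid: print('synced %s' % uuid))
```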